diff --git a/.circleci/config.yml b/.circleci/config.yml
index 31fb022b..ce1032fa 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -6,48 +6,38 @@ orbs:
 jobs:
   test:
-    docker:
-      - image: "cimg/python:<< parameters.tag >>"
-      - image: "cimg/node:16.15"
-      - image: "cimg/redis:6.2"
-      - image: "cimg/postgres:14.2"
-        environment:
-          POSTGRES_DB: speckle2_test
-          POSTGRES_PASSWORD: speckle
-          POSTGRES_USER: speckle
-      - image: "speckle/speckle-server"
-        command: ["bash", "-c", "/wait && node bin/www"]
-        environment:
-          POSTGRES_URL: "127.0.0.1"
-          POSTGRES_USER: "speckle"
-          POSTGRES_PASSWORD: "speckle"
-          POSTGRES_DB: "speckle2_test"
-          REDIS_URL: "redis://127.0.0.1"
-          SESSION_SECRET: "keyboard cat"
-          STRATEGY_LOCAL: "true"
-          CANONICAL_URL: "http://localhost:3000"
-          WAIT_HOSTS: 127.0.0.1:5432, 127.0.0.1:6379
-          DISABLE_FILE_UPLOADS: "true"
+    machine:
+      image: ubuntu-2204:2023.02.1
+      docker_layer_caching: true
+    resource_class: medium

     parameters:
       tag:
-        default: "3.8"
+        default: "3.11"
         type: string
     steps:
       - checkout
-      - run: python --version
       - run:
-          command: python -m pip install --upgrade pip
-          name: upgrade pip
-      - python/install-packages:
-          pkg-manager: poetry
-      - run: poetry run pytest --cov --cov-report xml:reports/coverage.xml --junitxml=reports/test-results.xml
-
+          name: Install python
+          command: |
+            pyenv install -s << parameters.tag >>
+            pyenv global << parameters.tag >>
+      - run:
+          name: Startup the Speckle Server
+          command: docker compose -f docker-compose.yml up -d
+      - run:
+          name: Install Poetry
+          command: |
+            pip install poetry
+      - run:
+          name: Install packages
+          command: poetry install
+      - run:
+          name: Run tests
+          command: poetry run pytest --cov --cov-report xml:reports/coverage.xml --junitxml=reports/test-results.xml
       - store_test_results:
           path: reports
-
       - store_artifacts:
           path: reports
-
       - codecov/upload

   deploy:
@@ -65,7 +55,7 @@ workflows:
       - test:
           matrix:
             parameters:
-              tag: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+              tag: ["3.11"]
          filters:
            tags:
              only: /.*/
diff --git a/.github/workflows/close-issue.yml b/.github/workflows/close-issue.yml
deleted file mode 100644
index 30e1ebcb..00000000
--- a/.github/workflows/close-issue.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-name: Update issue Status
-
-on:
-  issues:
-    types: [closed]
-
-jobs:
-  update_issue:
-    uses: specklesystems/github-actions/.github/workflows/project-add-issue.yml@main
-    secrets: inherit
-    with:
-      issue-id: ${{ github.event.issue.node_id }}
\ No newline at end of file
diff --git a/.github/workflows/open-issue.yml b/.github/workflows/open-issue.yml
deleted file mode 100644
index 27fe2b87..00000000
--- a/.github/workflows/open-issue.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-name: Move new issues into Project
-
-on:
-  issues:
-    types: [opened]
-
-jobs:
-  track_issue:
-    uses: specklesystems/github-actions/.github/workflows/project-add-issue.yml@main
-    secrets: inherit
-    with:
-      issue-id: ${{ github.event.issue.node_id }}
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 1a8aba5d..6f682af9 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -5,6 +5,7 @@
   "version": "0.2.0",
   "configurations": [
+
     {
       "name": "Python: Current File",
       "type": "python",
@@ -17,8 +18,8 @@
       "name": "Pytest",
       "type": "python",
       "request": "launch",
-      "program": "poetry",
-      "args": ["run", "pytest"],
+      "program": "pytest",
+      "args": [],
       "console": "integratedTerminal",
       "justMyCode": true
     }
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 00000000..16497424
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,162 @@
+version: "3.9"
+name: "speckle-server"
+
+services:
+  ####
+  # Speckle Server dependencies
+  #######
+  postgres:
+    image: "postgres:14.5-alpine"
+    restart: always
+    environment:
+      POSTGRES_DB: speckle
+      POSTGRES_USER: speckle
+      POSTGRES_PASSWORD: speckle
+    volumes:
+      - postgres-data:/var/lib/postgresql/data/
+    healthcheck:
+      # the -U user has to match the POSTGRES_USER value
+      test: ["CMD-SHELL", "pg_isready -U speckle"]
+      interval: 5s
+      timeout: 5s
+      retries: 30
+
+  redis:
+    image: "redis:6.0-alpine"
+    restart: always
+    volumes:
+      - redis-data:/data
+    healthcheck:
+      test: ["CMD", "redis-cli", "--raw", "incr", "ping"]
+      interval: 5s
+      timeout: 5s
+      retries: 30
+
+  minio:
+    image: "minio/minio"
+    command: server /data --console-address ":9001"
+    restart: always
+    volumes:
+      - minio-data:/data
+    healthcheck:
+      test:
+        [
+          "CMD-SHELL",
+          "curl -s -o /dev/null http://127.0.0.1:9000/minio/index.html",
+        ]
+      interval: 5s
+      timeout: 30s
+      retries: 30
+      start_period: 10s
+
+  ####
+  # Speckle Server
+  #######
+  speckle-frontend:
+    image: speckle/speckle-frontend:2
+    restart: always
+    ports:
+      - "0.0.0.0:8080:8080"
+    environment:
+      FILE_SIZE_LIMIT_MB: 100
+
+  speckle-server:
+    image: speckle/speckle-server:2
+    restart: always
+    healthcheck:
+      test:
+        [
+          "CMD",
+          "node",
+          "-e",
+          "require('node:http').request({headers: {'Content-Type': 'application/json'}, port:3000, hostname:'127.0.0.1', path:'/graphql?query={serverInfo{version}}', method: 'GET' }, (res) => { body = ''; res.on('data', (chunk) => {body += chunk;}); res.on('end', () => {process.exit(res.statusCode != 200 || body.toLowerCase().includes('error'));}); }).end();",
+        ]
+      interval: 10s
+      timeout: 3s
+      retries: 30
+    ports:
+      - "0.0.0.0:3000:3000"
+    depends_on:
+      postgres:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      minio:
+        condition: service_healthy
+    environment:
+      # TODO: Change this to the URL of the speckle server, as accessed from the network
+      CANONICAL_URL: "http://127.0.0.1:8080"
+      SPECKLE_AUTOMATE_URL: "http://127.0.0.1:3030"
+
+      REDIS_URL: "redis://redis"
+
+      S3_ENDPOINT: "http://minio:9000"
+      S3_ACCESS_KEY: "minioadmin"
+      S3_SECRET_KEY: "minioadmin"
+      S3_BUCKET: "speckle-server"
+      S3_CREATE_BUCKET: "true"
+
+      FILE_SIZE_LIMIT_MB: 100
+
+      # TODO: Change this to a unique secret for this server
+      SESSION_SECRET: "TODO:ReplaceWithLongString"
+
+      STRATEGY_LOCAL: "true"
+      DEBUG: "speckle:*"
+
+      POSTGRES_URL: "postgres"
+      POSTGRES_USER: "speckle"
+      POSTGRES_PASSWORD: "speckle"
+      POSTGRES_DB: "speckle"
+      ENABLE_MP: "false"
+
+  preview-service:
+    image: speckle/speckle-preview-service:2
+    restart: always
+    depends_on:
+      speckle-server:
+        condition: service_healthy
+    mem_limit: "1000m"
+    memswap_limit: "1000m"
+    environment:
+      DEBUG: "preview-service:*"
+      PG_CONNECTION_STRING: "postgres://speckle:speckle@postgres/speckle"
+
+  webhook-service:
+    image: speckle/speckle-webhook-service:2
+    restart: always
+    depends_on:
+      speckle-server:
+        condition: service_healthy
+    environment:
+      DEBUG: "webhook-service:*"
+      PG_CONNECTION_STRING: "postgres://speckle:speckle@postgres/speckle"
+      WAIT_HOSTS: postgres:5432
+
+  fileimport-service:
+    image: speckle/speckle-fileimport-service:2
+    restart: always
+    depends_on:
+      speckle-server:
+        condition: service_healthy
+    environment:
+      DEBUG: "fileimport-service:*"
+      PG_CONNECTION_STRING: "postgres://speckle:speckle@postgres/speckle"
+      WAIT_HOSTS: postgres:5432
+
+      S3_ENDPOINT: "http://minio:9000"
+      S3_ACCESS_KEY: "minioadmin"
+      S3_SECRET_KEY: "minioadmin"
"minioadmin" + S3_BUCKET: "speckle-server" + + SPECKLE_SERVER_URL: "http://speckle-server:3000" + +networks: + default: + name: speckle-server + +volumes: + postgres-data: + redis-data: + minio-data: diff --git a/poetry.lock b/poetry.lock index 3fe86dbb..7cbb0de9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,23 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.5.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.7" +files = [ + {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, + {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} [[package]] name = "appdirs" version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" optional = false python-versions = "*" files = [ @@ -14,38 +27,37 @@ files = [ [[package]] name = "argcomplete" -version = "2.0.0" +version = "2.0.6" description = "Bash tab completion for argparse" -category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "argcomplete-2.0.0-py2.py3-none-any.whl", hash = "sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e"}, - {file = "argcomplete-2.0.0.tar.gz", hash = "sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20"}, + {file = "argcomplete-2.0.6-py3-none-any.whl", hash = "sha256:6c2170b3e0ab54683cb28d319b65261bde1f11388be688b68118b7d281e34c94"}, + {file = "argcomplete-2.0.6.tar.gz", hash = "sha256:dc33528d96727882b576b24bc89ed038f3c6abbb6855ff9bb6be23384afff9d6"}, ] [package.dependencies] -importlib-metadata = {version = ">=0.23,<5", markers = "python_version == \"3.7\""} +importlib-metadata = {version = ">=0.23,<6", markers = "python_version == \"3.7\""} [package.extras] -test = ["coverage", "flake8", "pexpect", "wheel"] +lint = ["flake8", "mypy"] +test = ["coverage", "flake8", "mypy", "pexpect", "wheel"] [[package]] name = "astroid" -version = "2.12.13" +version = "2.15.6" description = "An abstract syntax tree for Python with inference support." 
-category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = "astroid-2.12.13-py3-none-any.whl", hash = "sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907"}, - {file = "astroid-2.12.13.tar.gz", hash = "sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7"}, + {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, + {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, ] [package.dependencies] lazy-object-proxy = ">=1.4.0" typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} -typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} wrapt = [ {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, @@ -55,7 +67,6 @@ wrapt = [ name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -71,28 +82,29 @@ test = ["astroid", "pytest"] [[package]] name = "attrs" -version = "22.2.0" +version = "23.1.0" description = "Classes Without Boilerplate" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + [package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "backoff" version = "2.2.1" description = "Function decoration for backoff and retry" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -104,7 +116,6 @@ files = [ name = "black" version = "22.12.0" description = "The uncompromising code formatter." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -139,21 +150,19 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] [[package]] name = "cfgv" version = "3.3.1" description = "Validate configuration and produce human readable error messages." -category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -165,7 +174,6 @@ files = [ name = "charset-normalizer" version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -178,14 +186,13 @@ unicode-backport = ["unicodedata2"] [[package]] name = "click" -version = "8.1.3" +version = "8.1.7" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -196,7 +203,6 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -206,14 +212,13 @@ files = [ [[package]] name = "commitizen" -version = "2.39.1" +version = "2.42.1" description = "Python commitizen client tool" -category = "dev" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ - {file = "commitizen-2.39.1-py3-none-any.whl", hash = "sha256:2678c51ed38676435a4ba02e164605b0aacfefcc3f7e0c8d11dd39e367e20577"}, - {file = "commitizen-2.39.1.tar.gz", hash = "sha256:1f4b77a6b6cf43fc75e7fc604081add66026a5031c2a5032b2b9e8202bc57d47"}, + {file = "commitizen-2.42.1-py3-none-any.whl", hash = "sha256:fad7d37cfae361a859b713d4ac591859d5ca03137dd52de4e1bd208f7f45d5dc"}, + {file = "commitizen-2.42.1.tar.gz", hash = "sha256:eac18c7c65587061aac6829534907aeb208405b8230bfd35ec08503c228a7f17"}, ] [package.dependencies] @@ -231,63 +236,71 @@ typing-extensions = ">=4.0.1,<5.0.0" [[package]] name = "coverage" -version = "7.0.3" +version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f7c51b6074a8a3063c341953dffe48fd6674f8e4b1d3c8aa8a91f58d6e716a8"}, - {file = "coverage-7.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:628f47eaf66727fc986d3b190d6fa32f5e6b7754a243919d28bc0fd7974c449f"}, - {file = "coverage-7.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89d5abf86c104de808108a25d171ad646c07eda96ca76c8b237b94b9c71e518"}, - {file = "coverage-7.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75e43c6f4ea4d122dac389aabdf9d4f0e160770a75e63372f88005d90f5bcc80"}, - {file = "coverage-7.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49da0ff241827ebb52d5d6d5a36d33b455fa5e721d44689c95df99fd8db82437"}, - {file = "coverage-7.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0bce4ad5bdd0b02e177a085d28d2cea5fc57bb4ba2cead395e763e34cf934eb1"}, - {file = "coverage-7.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f79691335257d60951638dd43576b9bcd6f52baa5c1c2cd07a509bb003238372"}, - {file = "coverage-7.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5722269ed05fbdb94eef431787c66b66260ff3125d1a9afcc00facff8c45adf9"}, - {file = "coverage-7.0.3-cp310-cp310-win32.whl", hash = "sha256:bdbda870e0fda7dd0fe7db7135ca226ec4c1ade8aa76e96614829b56ca491012"}, - {file = "coverage-7.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:e56fae4292e216b8deeee38ace84557b9fa85b52db005368a275427cdabb8192"}, - {file = "coverage-7.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b82343a5bc51627b9d606f0b6b6b9551db7b6311a5dd920fa52a94beae2e8959"}, - {file = "coverage-7.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fd0a8aa431f9b7ad9eb8264f55ef83cbb254962af3775092fb6e93890dea9ca2"}, - {file = "coverage-7.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:112cfead1bd22eada8a8db9ed387bd3e8be5528debc42b5d3c1f7da4ffaf9fb5"}, - {file = "coverage-7.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af87e906355fa42447be5c08c5d44e6e1c005bf142f303f726ddf5ed6e0c8a4d"}, - {file = "coverage-7.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f30090e22a301952c5abd0e493a1c8358b4f0b368b49fa3e4568ed3ed68b8d1f"}, - {file = "coverage-7.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ae871d09901911eedda1981ea6fd0f62a999107293cdc4c4fd612321c5b34745"}, - {file = "coverage-7.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ed7c9debf7bfc63c9b9f8b595409237774ff4b061bf29fba6f53b287a2fdeab9"}, - {file = "coverage-7.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:13121fa22dcd2c7b19c5161e3fd725692448f05377b788da4502a383573227b3"}, - {file = "coverage-7.0.3-cp311-cp311-win32.whl", hash = "sha256:037b51ee86bc600f99b3b957c20a172431c35c2ef9c1ca34bc813ab5b51fd9f5"}, - {file = "coverage-7.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:25fde928306034e8deecd5fc91a07432dcc282c8acb76749581a28963c9f4f3f"}, - {file = "coverage-7.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7e8b0642c38b3d3b3c01417643ccc645345b03c32a2e84ef93cdd6844d6fe530"}, - {file = "coverage-7.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18b09811f849cc958d23f733a350a66b54a8de3fed1e6128ba55a5c97ffb6f65"}, - {file = "coverage-7.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:349d0b545520e8516f7b4f12373afc705d17d901e1de6a37a20e4ec9332b61f7"}, - {file = "coverage-7.0.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b38813eee5b4739f505d94247604c72eae626d5088a16dd77b08b8b1724ab3"}, - {file = "coverage-7.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ba9af1218fa01b1f11c72271bc7290b701d11ad4dbc2ae97c445ecacf6858dba"}, - {file = "coverage-7.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c5648c7eec5cf1ba5db1cf2d6c10036a582d7f09e172990474a122e30c841361"}, - {file = "coverage-7.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d0df04495b76a885bfef009f45eebe8fe2fbf815ad7a83dabcf5aced62f33162"}, - {file = "coverage-7.0.3-cp37-cp37m-win32.whl", hash = "sha256:af6cef3796b8068713a48dd67d258dc9a6e2ebc3bd4645bfac03a09672fa5d20"}, - {file = "coverage-7.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:62ef3800c4058844e2e3fa35faa9dd0ccde8a8aba6c763aae50342e00d4479d4"}, - {file = "coverage-7.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:acef7f3a3825a2d218a03dd02f5f3cc7f27aa31d882dd780191d1ad101120d74"}, - {file = "coverage-7.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a530663a361eb27375cec28aea5cd282089b5e4b022ae451c4c3493b026a68a5"}, - {file = "coverage-7.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c58cd6bb46dcb922e0d5792850aab5964433d511b3a020867650f8d930dde4f4"}, - {file = "coverage-7.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f918e9ef4c98f477a5458238dde2a1643aed956c7213873ab6b6b82e32b8ef61"}, - {file = "coverage-7.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b865aa679bee7fbd1c55960940dbd3252621dd81468268786c67122bbd15343"}, - {file = "coverage-7.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c5d9b480ebae60fc2cbc8d6865194136bc690538fa542ba58726433bed6e04cc"}, - {file = "coverage-7.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:985ad2af5ec3dbb4fd75d5b0735752c527ad183455520055a08cf8d6794cabfc"}, - {file = "coverage-7.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ca15308ef722f120967af7474ba6a453e0f5b6f331251e20b8145497cf1bc14a"}, - {file = 
"coverage-7.0.3-cp38-cp38-win32.whl", hash = "sha256:c1cee10662c25c94415bbb987f2ec0e6ba9e8fce786334b10be7e6a7ab958f69"}, - {file = "coverage-7.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:44d6a556de4418f1f3bfd57094b8c49f0408df5a433cf0d253eeb3075261c762"}, - {file = "coverage-7.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e6dcc70a25cb95df0ae33dfc701de9b09c37f7dd9f00394d684a5b57257f8246"}, - {file = "coverage-7.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bf76d79dfaea802f0f28f50153ffbc1a74ae1ee73e480baeda410b4f3e7ab25f"}, - {file = "coverage-7.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88834e5d56d01c141c29deedacba5773fe0bed900b1edc957595a8a6c0da1c3c"}, - {file = "coverage-7.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef001a60e888f8741e42e5aa79ae55c91be73761e4df5e806efca1ddd62fd400"}, - {file = "coverage-7.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959dc506be74e4963bd2c42f7b87d8e4b289891201e19ec551e64c6aa5441f8"}, - {file = "coverage-7.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b791beb17b32ac019a78cfbe6184f992b6273fdca31145b928ad2099435e2fcb"}, - {file = "coverage-7.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b07651e3b9af8f1a092861d88b4c74d913634a7f1f2280fca0ad041ad84e9e96"}, - {file = "coverage-7.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55e46fa4168ccb7497c9be78627fcb147e06f474f846a10d55feeb5108a24ef0"}, - {file = "coverage-7.0.3-cp39-cp39-win32.whl", hash = "sha256:e3f1cd1cd65695b1540b3cf7828d05b3515974a9d7c7530f762ac40f58a18161"}, - {file = "coverage-7.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:d8249666c23683f74f8f93aeaa8794ac87cc61c40ff70374a825f3352a4371dc"}, - {file = "coverage-7.0.3-pp37.pp38.pp39-none-any.whl", hash = "sha256:b1ffc8f58b81baed3f8962e28c30d99442079b82ce1ec836a1f67c0accad91c1"}, - {file = "coverage-7.0.3.tar.gz", hash = "sha256:d5be4e93acce64f516bf4fd239c0e6118fc913c93fa1a3f52d15bdcc60d97b2d"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = 
"coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = 
"coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = 
"coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [package.dependencies] @@ -300,7 +313,6 @@ toml = ["tomli"] name = "decli" version = "0.5.2" description = "Minimal, easy-to-use, declarative cli tool" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -310,27 +322,25 @@ files = [ [[package]] name = "deprecated" -version = "1.2.13" +version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, - {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, ] [package.dependencies] wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] [[package]] name = "devtools" version = "0.8.0" description = "Python's missing debug print command and other development tools." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -347,14 +357,13 @@ pygments = ["Pygments (>=2.2.0)"] [[package]] name = "dill" -version = "0.3.6" -description = "serialize all of python" -category = "dev" +version = "0.3.7" +description = "serialize all of Python" optional = false python-versions = ">=3.7" files = [ - {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, - {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, ] [package.extras] @@ -362,26 +371,24 @@ graph = ["objgraph (>=1.7.2)"] [[package]] name = "distlib" -version = "0.3.6" +version = "0.3.7" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, - {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, ] [[package]] name = "exceptiongroup" -version = "1.1.0" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, - {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -391,7 +398,6 @@ test = ["pytest (>=6)"] name = "executing" version = "0.10.0" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = "*" files = [ @@ -401,30 +407,28 @@ files = [ [[package]] name = "filelock" -version = "3.9.0" +version = "3.12.2" description = "A platform independent file lock." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, - {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "gql" -version = "3.4.0" +version = "3.4.1" description = "GraphQL client for Python" -category = "main" optional = false python-versions = "*" files = [ - {file = "gql-3.4.0-py2.py3-none-any.whl", hash = "sha256:59c8a0b8f0a2f3b0b2ff970c94de86f82f65cb1da3340bfe57143e5f7ea82f71"}, - {file = "gql-3.4.0.tar.gz", hash = "sha256:ca81aa8314fa88a8c57dd1ce34941278e0c352d762eb721edcba0387829ea7c0"}, + {file = "gql-3.4.1-py2.py3-none-any.whl", hash = "sha256:315624ca0f4d571ef149d455033ebd35e45c1a13f18a059596aeddcea99135cf"}, + {file = "gql-3.4.1.tar.gz", hash = "sha256:11dc5d8715a827f2c2899593439a4f36449db4f0eafa5b1ea63948f8a2f8c545"}, ] [package.dependencies] @@ -432,17 +436,17 @@ backoff = ">=1.11.1,<3.0" graphql-core = ">=3.2,<3.3" requests = {version = ">=2.26,<3", optional = true, markers = "extra == \"requests\""} requests-toolbelt = {version = ">=0.9.1,<1", optional = true, markers = "extra == \"requests\""} -urllib3 = {version = ">=1.26", optional = true, markers = "extra == \"requests\""} +urllib3 = {version = ">=1.26,<2", optional = true, markers = "extra == \"requests\""} websockets = {version = ">=10,<11", optional = true, markers = "python_version > \"3.6\" and extra == \"websockets\""} yarl = ">=1.6,<2.0" [package.extras] aiohttp = ["aiohttp (>=3.7.1,<3.9.0)"] -all = ["aiohttp (>=3.7.1,<3.9.0)", "botocore (>=1.21,<2)", "requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "urllib3 (>=1.26)", "websockets (>=10,<11)", "websockets (>=9,<10)"] +all = ["aiohttp (>=3.7.1,<3.9.0)", "botocore (>=1.21,<2)", "requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "urllib3 (>=1.26,<2)", "websockets (>=10,<11)", "websockets (>=9,<10)"] botocore = ["botocore (>=1.21,<2)"] -dev = ["aiofiles", "aiohttp (>=3.7.1,<3.9.0)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==6.2.5)", "pytest-asyncio (==0.16.0)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "sphinx (>=3.0.0,<4)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "urllib3 (>=1.26)", "vcrpy (==4.0.2)", "websockets (>=10,<11)", "websockets (>=9,<10)"] -requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "urllib3 (>=1.26)"] -test = ["aiofiles", "aiohttp (>=3.7.1,<3.9.0)", "botocore (>=1.21,<2)", 
"mock (==4.0.2)", "parse (==1.15.0)", "pytest (==6.2.5)", "pytest-asyncio (==0.16.0)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "urllib3 (>=1.26)", "vcrpy (==4.0.2)", "websockets (>=10,<11)", "websockets (>=9,<10)"] +dev = ["aiofiles", "aiohttp (>=3.7.1,<3.9.0)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==6.2.5)", "pytest-asyncio (==0.16.0)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "sphinx (>=3.0.0,<4)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "urllib3 (>=1.26,<2)", "vcrpy (==4.0.2)", "websockets (>=10,<11)", "websockets (>=9,<10)"] +requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "urllib3 (>=1.26,<2)"] +test = ["aiofiles", "aiohttp (>=3.7.1,<3.9.0)", "botocore (>=1.21,<2)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==6.2.5)", "pytest-asyncio (==0.16.0)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "urllib3 (>=1.26,<2)", "vcrpy (==4.0.2)", "websockets (>=10,<11)", "websockets (>=9,<10)"] test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==6.2.5)", "pytest-asyncio (==0.16.0)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.0.2)"] websockets = ["websockets (>=10,<11)", "websockets (>=9,<10)"] @@ -450,7 +454,6 @@ websockets = ["websockets (>=10,<11)", "websockets (>=9,<10)"] name = "graphql-core" version = "3.2.3" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
-category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -463,14 +466,13 @@ typing-extensions = {version = ">=4.2,<5", markers = "python_version < \"3.8\""} [[package]] name = "identify" -version = "2.5.12" +version = "2.5.24" description = "File identification library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "identify-2.5.12-py2.py3-none-any.whl", hash = "sha256:e8a400c3062d980243d27ce10455a52832205649bbcaf27ffddb3dfaaf477bad"}, - {file = "identify-2.5.12.tar.gz", hash = "sha256:0bc96b09c838310b6fcfcc61f78a981ea07f94836ef6ef553da5bb5d4745d662"}, + {file = "identify-2.5.24-py2.py3-none-any.whl", hash = "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d"}, + {file = "identify-2.5.24.tar.gz", hash = "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4"}, ] [package.extras] @@ -480,7 +482,6 @@ license = ["ukkonen"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -490,14 +491,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "4.13.0" +version = "5.2.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, - {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, + {file = "importlib_metadata-5.2.0-py3-none-any.whl", hash = "sha256:0eafa39ba42bf225fc00e67f701d71f85aead9f878569caf13c3724f704b970f"}, + {file = "importlib_metadata-5.2.0.tar.gz", hash = "sha256:404d48d62bba0b7a77ff9d405efd91501bef2e67ff4ace0bed40a0cf28c3c7cd"}, ] [package.dependencies] @@ -505,37 +505,35 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "isort" -version = "5.11.4" +version = "5.11.5" description = "A Python utility / library to sort Python imports." 
-category = "dev" optional = false python-versions = ">=3.7.0" files = [ - {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, - {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, + {file = "isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746"}, + {file = "isort-5.11.5.tar.gz", hash = "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db"}, ] [package.extras] colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] @@ -543,7 +541,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -561,7 +558,6 @@ i18n = ["Babel (>=2.7)"] name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -605,59 +601,67 @@ files = [ [[package]] name = "markupsafe" -version = "2.1.1" +version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = 
"MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = 
"MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = 
"MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -669,7 +673,6 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -753,7 +756,6 @@ files = [ name = "mypy" version = "0.982" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -796,26 +798,24 @@ reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "dev" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "nodeenv" -version = "1.7.0" +version = "1.8.0" description = "Node.js virtual environment builder" -category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ - {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, - {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, ] [package.dependencies] @@ -823,57 +823,53 @@ setuptools = "*" [[package]] name = "packaging" -version = "22.0" +version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, - {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] [[package]] name = "pathspec" -version = "0.10.3" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, - {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] name = "platformdirs" -version = "2.6.2" +version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, - {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, ] [package.dependencies] -typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.8\""} [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [package.dependencies] @@ -887,7 +883,6 @@ testing = ["pytest", "pytest-benchmark"] name = "pre-commit" version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -905,14 +900,13 @@ virtualenv = ">=20.10.0" [[package]] name = "prompt-toolkit" -version = "3.0.36" +version = "3.0.39" description = "Library for building powerful interactive command lines in Python" -category = "dev" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"}, - {file = "prompt_toolkit-3.0.36.tar.gz", hash = "sha256:3e163f254bef5a03b146397d7c1963bd3e2812f0964bb9a24e6ec761fd28db63"}, + {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, + {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, ] [package.dependencies] @@ -920,71 +914,154 @@ wcwidth = "*" [[package]] name = "pydantic" -version = "1.10.4" -description = "Data validation and settings management using python type hints" -category = "main" +version = "2.3.0" +description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5635de53e6686fe7a44b5cf25fcc419a0d5e5c1a1efe73d49d48fe7586db854"}, - {file = "pydantic-1.10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6dc1cc241440ed7ca9ab59d9929075445da6b7c94ced281b3dd4cfe6c8cff817"}, - {file = "pydantic-1.10.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51bdeb10d2db0f288e71d49c9cefa609bca271720ecd0c58009bd7504a0c464c"}, - {file = "pydantic-1.10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78cec42b95dbb500a1f7120bdf95c401f6abb616bbe8785ef09887306792e66e"}, - {file = "pydantic-1.10.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8775d4ef5e7299a2f4699501077a0defdaac5b6c4321173bcb0f3c496fbadf85"}, - {file = "pydantic-1.10.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:572066051eeac73d23f95ba9a71349c42a3e05999d0ee1572b7860235b850cc6"}, - {file = "pydantic-1.10.4-cp310-cp310-win_amd64.whl", hash = "sha256:7feb6a2d401f4d6863050f58325b8d99c1e56f4512d98b11ac64ad1751dc647d"}, - {file = "pydantic-1.10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39f4a73e5342b25c2959529f07f026ef58147249f9b7431e1ba8414a36761f53"}, - {file = "pydantic-1.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:983e720704431a6573d626b00662eb78a07148c9115129f9b4351091ec95ecc3"}, - {file = "pydantic-1.10.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75d52162fe6b2b55964fbb0af2ee58e99791a3138588c482572bb6087953113a"}, - {file = "pydantic-1.10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdf8d759ef326962b4678d89e275ffc55b7ce59d917d9f72233762061fd04a2d"}, - {file = "pydantic-1.10.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05a81b006be15655b2a1bae5faa4280cf7c81d0e09fcb49b342ebf826abe5a72"}, - {file = "pydantic-1.10.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d88c4c0e5c5dfd05092a4b271282ef0588e5f4aaf345778056fc5259ba098857"}, - {file = "pydantic-1.10.4-cp311-cp311-win_amd64.whl", hash = "sha256:6a05a9db1ef5be0fe63e988f9617ca2551013f55000289c671f71ec16f4985e3"}, - {file = "pydantic-1.10.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:887ca463c3bc47103c123bc06919c86720e80e1214aab79e9b779cda0ff92a00"}, - {file = "pydantic-1.10.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdf88ab63c3ee282c76d652fc86518aacb737ff35796023fae56a65ced1a5978"}, - {file = "pydantic-1.10.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a48f1953c4a1d9bd0b5167ac50da9a79f6072c63c4cef4cf2a3736994903583e"}, - {file = "pydantic-1.10.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a9f2de23bec87ff306aef658384b02aa7c32389766af3c5dee9ce33e80222dfa"}, - {file = "pydantic-1.10.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:cd8702c5142afda03dc2b1ee6bc358b62b3735b2cce53fc77b31ca9f728e4bc8"}, - {file = "pydantic-1.10.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6e7124d6855b2780611d9f5e1e145e86667eaa3bd9459192c8dc1a097f5e9903"}, - {file = "pydantic-1.10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b53e1d41e97063d51a02821b80538053ee4608b9a181c1005441f1673c55423"}, - {file = "pydantic-1.10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:55b1625899acd33229c4352ce0ae54038529b412bd51c4915349b49ca575258f"}, - {file = "pydantic-1.10.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:301d626a59edbe5dfb48fcae245896379a450d04baeed50ef40d8199f2733b06"}, - {file = "pydantic-1.10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6f9d649892a6f54a39ed56b8dfd5e08b5f3be5f893da430bed76975f3735d15"}, - {file = "pydantic-1.10.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d7b5a3821225f5c43496c324b0d6875fde910a1c2933d726a743ce328fbb2a8c"}, - {file = "pydantic-1.10.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f2f7eb6273dd12472d7f218e1fef6f7c7c2f00ac2e1ecde4db8824c457300416"}, - {file = "pydantic-1.10.4-cp38-cp38-win_amd64.whl", hash = "sha256:4b05697738e7d2040696b0a66d9f0a10bec0efa1883ca75ee9e55baf511909d6"}, - {file = "pydantic-1.10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a9a6747cac06c2beb466064dda999a13176b23535e4c496c9d48e6406f92d42d"}, - {file = "pydantic-1.10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb992a1ef739cc7b543576337bebfc62c0e6567434e522e97291b251a41dad7f"}, - {file = "pydantic-1.10.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:990406d226dea0e8f25f643b370224771878142155b879784ce89f633541a024"}, - {file = "pydantic-1.10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e82a6d37a95e0b1b42b82ab340ada3963aea1317fd7f888bb6b9dfbf4fff57c"}, - {file = "pydantic-1.10.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9193d4f4ee8feca58bc56c8306bcb820f5c7905fd919e0750acdeeeef0615b28"}, - {file = "pydantic-1.10.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b3ce5f16deb45c472dde1a0ee05619298c864a20cded09c4edd820e1454129f"}, - {file = "pydantic-1.10.4-cp39-cp39-win_amd64.whl", hash = "sha256:9cbdc268a62d9a98c56e2452d6c41c0263d64a2009aac69246486f01b4f594c4"}, - {file = "pydantic-1.10.4-py3-none-any.whl", hash = "sha256:4948f264678c703f3877d1c8877c4e3b2e12e549c57795107f08cf70c6ec7774"}, - {file = "pydantic-1.10.4.tar.gz", hash = "sha256:b9a3859f24eb4e097502a3be1fb4b2abb79b6103dd9e2e0edb70613a4459a648"}, + {file = "pydantic-2.3.0-py3-none-any.whl", hash = "sha256:45b5e446c6dfaad9444819a293b921a40e1db1aa61ea08aede0522529ce90e81"}, + {file = "pydantic-2.3.0.tar.gz", hash = "sha256:1607cc106602284cd4a00882986570472f193fde9cb1259bceeaedb26aa79a6d"}, ] 
[package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.4.0" +pydantic-core = "2.6.3" +typing-extensions = ">=4.6.1" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.6.3" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.6.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:1a0ddaa723c48af27d19f27f1c73bdc615c73686d763388c8683fe34ae777bad"}, + {file = "pydantic_core-2.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5cfde4fab34dd1e3a3f7f3db38182ab6c95e4ea91cf322242ee0be5c2f7e3d2f"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a7027bfc6b108e17c3383959485087d5942e87eb62bbac69829eae9bc1f7"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84e87c16f582f5c753b7f39a71bd6647255512191be2d2dbf49458c4ef024588"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:522a9c4a4d1924facce7270c84b5134c5cabcb01513213662a2e89cf28c1d309"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaafc776e5edc72b3cad1ccedb5fd869cc5c9a591f1213aa9eba31a781be9ac1"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a750a83b2728299ca12e003d73d1264ad0440f60f4fc9cee54acc489249b728"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e8b374ef41ad5c461efb7a140ce4730661aadf85958b5c6a3e9cf4e040ff4bb"}, + {file = "pydantic_core-2.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b594b64e8568cf09ee5c9501ede37066b9fc41d83d58f55b9952e32141256acd"}, + {file = "pydantic_core-2.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2a20c533cb80466c1d42a43a4521669ccad7cf2967830ac62c2c2f9cece63e7e"}, + {file = "pydantic_core-2.6.3-cp310-none-win32.whl", hash = "sha256:04fe5c0a43dec39aedba0ec9579001061d4653a9b53a1366b113aca4a3c05ca7"}, + {file = "pydantic_core-2.6.3-cp310-none-win_amd64.whl", hash = "sha256:6bf7d610ac8f0065a286002a23bcce241ea8248c71988bda538edcc90e0c39ad"}, + {file = "pydantic_core-2.6.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6bcc1ad776fffe25ea5c187a028991c031a00ff92d012ca1cc4714087e575973"}, + {file = "pydantic_core-2.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:df14f6332834444b4a37685810216cc8fe1fe91f447332cd56294c984ecbff1c"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b7486d85293f7f0bbc39b34e1d8aa26210b450bbd3d245ec3d732864009819"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a892b5b1871b301ce20d40b037ffbe33d1407a39639c2b05356acfef5536d26a"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:883daa467865e5766931e07eb20f3e8152324f0adf52658f4d302242c12e2c32"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4eb77df2964b64ba190eee00b2312a1fd7a862af8918ec70fc2d6308f76ac64"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce8c84051fa292a5dc54018a40e2a1926fd17980a9422c973e3ebea017aa8da"}, + {file = 
"pydantic_core-2.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22134a4453bd59b7d1e895c455fe277af9d9d9fbbcb9dc3f4a97b8693e7e2c9b"}, + {file = "pydantic_core-2.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:02e1c385095efbd997311d85c6021d32369675c09bcbfff3b69d84e59dc103f6"}, + {file = "pydantic_core-2.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d79f1f2f7ebdb9b741296b69049ff44aedd95976bfee38eb4848820628a99b50"}, + {file = "pydantic_core-2.6.3-cp311-none-win32.whl", hash = "sha256:430ddd965ffd068dd70ef4e4d74f2c489c3a313adc28e829dd7262cc0d2dd1e8"}, + {file = "pydantic_core-2.6.3-cp311-none-win_amd64.whl", hash = "sha256:84f8bb34fe76c68c9d96b77c60cef093f5e660ef8e43a6cbfcd991017d375950"}, + {file = "pydantic_core-2.6.3-cp311-none-win_arm64.whl", hash = "sha256:5a2a3c9ef904dcdadb550eedf3291ec3f229431b0084666e2c2aa8ff99a103a2"}, + {file = "pydantic_core-2.6.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8421cf496e746cf8d6b677502ed9a0d1e4e956586cd8b221e1312e0841c002d5"}, + {file = "pydantic_core-2.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bb128c30cf1df0ab78166ded1ecf876620fb9aac84d2413e8ea1594b588c735d"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a822f630712817b6ecc09ccc378192ef5ff12e2c9bae97eb5968a6cdf3b862"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:240a015102a0c0cc8114f1cba6444499a8a4d0333e178bc504a5c2196defd456"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f90e5e3afb11268628c89f378f7a1ea3f2fe502a28af4192e30a6cdea1e7d5e"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:340e96c08de1069f3d022a85c2a8c63529fd88709468373b418f4cf2c949fb0e"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1480fa4682e8202b560dcdc9eeec1005f62a15742b813c88cdc01d44e85308e5"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f14546403c2a1d11a130b537dda28f07eb6c1805a43dae4617448074fd49c282"}, + {file = "pydantic_core-2.6.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a87c54e72aa2ef30189dc74427421e074ab4561cf2bf314589f6af5b37f45e6d"}, + {file = "pydantic_core-2.6.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f93255b3e4d64785554e544c1c76cd32f4a354fa79e2eeca5d16ac2e7fdd57aa"}, + {file = "pydantic_core-2.6.3-cp312-none-win32.whl", hash = "sha256:f70dc00a91311a1aea124e5f64569ea44c011b58433981313202c46bccbec0e1"}, + {file = "pydantic_core-2.6.3-cp312-none-win_amd64.whl", hash = "sha256:23470a23614c701b37252618e7851e595060a96a23016f9a084f3f92f5ed5881"}, + {file = "pydantic_core-2.6.3-cp312-none-win_arm64.whl", hash = "sha256:1ac1750df1b4339b543531ce793b8fd5c16660a95d13aecaab26b44ce11775e9"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a53e3195f134bde03620d87a7e2b2f2046e0e5a8195e66d0f244d6d5b2f6d31b"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:f2969e8f72c6236c51f91fbb79c33821d12a811e2a94b7aa59c65f8dbdfad34a"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:672174480a85386dd2e681cadd7d951471ad0bb028ed744c895f11f9d51b9ebe"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:002d0ea50e17ed982c2d65b480bd975fc41086a5a2f9c924ef8fc54419d1dea3"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ccc13afee44b9006a73d2046068d4df96dc5b333bf3509d9a06d1b42db6d8bf"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:439a0de139556745ae53f9cc9668c6c2053444af940d3ef3ecad95b079bc9987"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d63b7545d489422d417a0cae6f9898618669608750fc5e62156957e609e728a5"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b44c42edc07a50a081672e25dfe6022554b47f91e793066a7b601ca290f71e42"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c721bfc575d57305dd922e6a40a8fe3f762905851d694245807a351ad255c58"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5e4a2cf8c4543f37f5dc881de6c190de08096c53986381daebb56a355be5dfe6"}, + {file = "pydantic_core-2.6.3-cp37-none-win32.whl", hash = "sha256:d9b4916b21931b08096efed090327f8fe78e09ae8f5ad44e07f5c72a7eedb51b"}, + {file = "pydantic_core-2.6.3-cp37-none-win_amd64.whl", hash = "sha256:a8acc9dedd304da161eb071cc7ff1326aa5b66aadec9622b2574ad3ffe225525"}, + {file = "pydantic_core-2.6.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:5e9c068f36b9f396399d43bfb6defd4cc99c36215f6ff33ac8b9c14ba15bdf6b"}, + {file = "pydantic_core-2.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e61eae9b31799c32c5f9b7be906be3380e699e74b2db26c227c50a5fc7988698"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85463560c67fc65cd86153a4975d0b720b6d7725cf7ee0b2d291288433fc21b"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9616567800bdc83ce136e5847d41008a1d602213d024207b0ff6cab6753fe645"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e9b65a55bbabda7fccd3500192a79f6e474d8d36e78d1685496aad5f9dbd92c"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f468d520f47807d1eb5d27648393519655eadc578d5dd862d06873cce04c4d1b"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9680dd23055dd874173a3a63a44e7f5a13885a4cfd7e84814be71be24fba83db"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a718d56c4d55efcfc63f680f207c9f19c8376e5a8a67773535e6f7e80e93170"}, + {file = "pydantic_core-2.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8ecbac050856eb6c3046dea655b39216597e373aa8e50e134c0e202f9c47efec"}, + {file = "pydantic_core-2.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:788be9844a6e5c4612b74512a76b2153f1877cd845410d756841f6c3420230eb"}, + {file = "pydantic_core-2.6.3-cp38-none-win32.whl", hash = "sha256:07a1aec07333bf5adebd8264047d3dc518563d92aca6f2f5b36f505132399efc"}, + {file = "pydantic_core-2.6.3-cp38-none-win_amd64.whl", hash = "sha256:621afe25cc2b3c4ba05fff53525156d5100eb35c6e5a7cf31d66cc9e1963e378"}, + {file = "pydantic_core-2.6.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:813aab5bfb19c98ae370952b6f7190f1e28e565909bfc219a0909db168783465"}, + {file = "pydantic_core-2.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:50555ba3cb58f9861b7a48c493636b996a617db1a72c18da4d7f16d7b1b9952b"}, + 
{file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e20f8baedd7d987bd3f8005c146e6bcbda7cdeefc36fad50c66adb2dd2da48"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b0a5d7edb76c1c57b95df719af703e796fc8e796447a1da939f97bfa8a918d60"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f06e21ad0b504658a3a9edd3d8530e8cea5723f6ea5d280e8db8efc625b47e49"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea053cefa008fda40f92aab937fb9f183cf8752e41dbc7bc68917884454c6362"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:171a4718860790f66d6c2eda1d95dd1edf64f864d2e9f9115840840cf5b5713f"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ed7ceca6aba5331ece96c0e328cd52f0dcf942b8895a1ed2642de50800b79d3"}, + {file = "pydantic_core-2.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:acafc4368b289a9f291e204d2c4c75908557d4f36bd3ae937914d4529bf62a76"}, + {file = "pydantic_core-2.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1aa712ba150d5105814e53cb141412217146fedc22621e9acff9236d77d2a5ef"}, + {file = "pydantic_core-2.6.3-cp39-none-win32.whl", hash = "sha256:44b4f937b992394a2e81a5c5ce716f3dcc1237281e81b80c748b2da6dd5cf29a"}, + {file = "pydantic_core-2.6.3-cp39-none-win_amd64.whl", hash = "sha256:9b33bf9658cb29ac1a517c11e865112316d09687d767d7a0e4a63d5c640d1b17"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d7050899026e708fb185e174c63ebc2c4ee7a0c17b0a96ebc50e1f76a231c057"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99faba727727b2e59129c59542284efebbddade4f0ae6a29c8b8d3e1f437beb7"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fa159b902d22b283b680ef52b532b29554ea2a7fc39bf354064751369e9dbd7"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:046af9cfb5384f3684eeb3f58a48698ddab8dd870b4b3f67f825353a14441418"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:930bfe73e665ebce3f0da2c6d64455098aaa67e1a00323c74dc752627879fc67"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:85cc4d105747d2aa3c5cf3e37dac50141bff779545ba59a095f4a96b0a460e70"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b25afe9d5c4f60dcbbe2b277a79be114e2e65a16598db8abee2a2dcde24f162b"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e49ce7dc9f925e1fb010fc3d555250139df61fa6e5a0a95ce356329602c11ea9"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2dd50d6a1aef0426a1d0199190c6c43ec89812b1f409e7fe44cb0fbf6dfa733c"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6595b0d8c8711e8e1dc389d52648b923b809f68ac1c6f0baa525c6440aa0daa"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ef724a059396751aef71e847178d66ad7fc3fc969a1a40c29f5aac1aa5f8784"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:3c8945a105f1589ce8a693753b908815e0748f6279959a4530f6742e1994dcb6"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c8c6660089a25d45333cb9db56bb9e347241a6d7509838dbbd1931d0e19dbc7f"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:692b4ff5c4e828a38716cfa92667661a39886e71136c97b7dac26edef18767f7"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f1a5d8f18877474c80b7711d870db0eeef9442691fcdb00adabfc97e183ee0b0"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3796a6152c545339d3b1652183e786df648ecdf7c4f9347e1d30e6750907f5bb"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b962700962f6e7a6bd77e5f37320cabac24b4c0f76afeac05e9f93cf0c620014"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56ea80269077003eaa59723bac1d8bacd2cd15ae30456f2890811efc1e3d4413"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c0ebbebae71ed1e385f7dfd9b74c1cff09fed24a6df43d326dd7f12339ec34"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:252851b38bad3bfda47b104ffd077d4f9604a10cb06fe09d020016a25107bf98"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6656a0ae383d8cd7cc94e91de4e526407b3726049ce8d7939049cbfa426518c8"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9140ded382a5b04a1c030b593ed9bf3088243a0a8b7fa9f071a5736498c5483"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d38bbcef58220f9c81e42c255ef0bf99735d8f11edef69ab0b499da77105158a"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:c9d469204abcca28926cbc28ce98f28e50e488767b084fb3fbdf21af11d3de26"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48c1ed8b02ffea4d5c9c220eda27af02b8149fe58526359b3c07eb391cb353a2"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2b1bfed698fa410ab81982f681f5b1996d3d994ae8073286515ac4d165c2e7"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf9d42a71a4d7a7c1f14f629e5c30eac451a6fc81827d2beefd57d014c006c4a"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4292ca56751aebbe63a84bbfc3b5717abb09b14d4b4442cc43fd7c49a1529efd"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7dc2ce039c7290b4ef64334ec7e6ca6494de6eecc81e21cb4f73b9b39991408c"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:615a31b1629e12445c0e9fc8339b41aaa6cc60bd53bf802d5fe3d2c0cda2ae8d"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1fa1f6312fb84e8c281f32b39affe81984ccd484da6e9d65b3d18c202c666149"}, + {file = "pydantic_core-2.6.3.tar.gz", hash = "sha256:1508f37ba9e3ddc0189e6ff4e2228bd2d3c3a4641cbe8c07177162f76ed696c7"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pylint" -version = "2.15.9" +version = "2.17.5" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = 
"pylint-2.15.9-py3-none-any.whl", hash = "sha256:349c8cd36aede4d50a0754a8c0218b43323d13d5d88f4b2952ddfe3e169681eb"}, - {file = "pylint-2.15.9.tar.gz", hash = "sha256:18783cca3cfee5b83c6c5d10b3cdb66c6594520ffae61890858fe8d932e1c6b4"}, + {file = "pylint-2.17.5-py3-none-any.whl", hash = "sha256:73995fb8216d3bed149c8d51bba25b2c52a8251a2c8ac846ec668ce38fab5413"}, + {file = "pylint-2.17.5.tar.gz", hash = "sha256:f7b601cbc06fef7e62a754e2b41294c2aa31f1cb659624b9a85bcba29eaf8252"}, ] [package.dependencies] -astroid = ">=2.12.13,<=2.14.0-dev0" +astroid = ">=2.15.6,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -1003,18 +1080,16 @@ testutils = ["gitpython (>3)"] [[package]] name = "pytest" -version = "7.2.0" +version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, ] [package.dependencies] -attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} @@ -1024,13 +1099,12 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1049,7 +1123,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-ordering" version = "0.6" description = "pytest plugin to run your tests in a specific order" -category = "dev" optional = false python-versions = "*" files = [ @@ -1063,59 +1136,57 @@ pytest = "*" [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.1" description = "YAML parser and emitter for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash 
= "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + 
{file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] [[package]] name = "questionary" version = "1.10.0" description = "Python library to build pretty command line user prompts ⭐️" -category = "dev" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -1131,21 +1202,20 @@ docs = ["Sphinx (>=3.3,<4.0)", "sphinx-autobuild (>=2020.9.1,<2021.0.0)", "sphin [[package]] name = "requests" -version = "2.28.1" +version = "2.31.0" description = "Python HTTP for Humans." 
-category = "main" optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -1155,7 +1225,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-toolbelt" version = "0.10.1" description = "A utility belt for advanced users of python-requests" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1170,7 +1239,6 @@ requests = ">=2.0.1,<3.0.0" name = "ruff" version = "0.0.187" description = "An extremely fast Python linter, written in Rust." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1194,26 +1262,24 @@ files = [ [[package]] name = "setuptools" -version = "65.6.3" +version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", 
"pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1225,7 +1291,6 @@ files = [ name = "stringcase" version = "1.2.0" description = "String case converter." -category = "main" optional = false python-versions = "*" files = [ @@ -1234,14 +1299,13 @@ files = [ [[package]] name = "termcolor" -version = "2.2.0" +version = "2.3.0" description = "ANSI color formatting for output in terminal" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "termcolor-2.2.0-py3-none-any.whl", hash = "sha256:91ddd848e7251200eac969846cbae2dacd7d71c2871e92733289e7e3666f48e7"}, - {file = "termcolor-2.2.0.tar.gz", hash = "sha256:dfc8ac3f350788f23b2947b3e6cfa5a53b630b612e6cd8965a015a776020b99a"}, + {file = "termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475"}, + {file = "termcolor-2.3.0.tar.gz", hash = "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a"}, ] [package.extras] @@ -1251,7 +1315,6 @@ tests = ["pytest", "pytest-cov"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1261,198 +1324,206 @@ files = [ [[package]] name = "tomlkit" -version = "0.11.6" +version = "0.12.1" description = "Style preserving TOML library" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"}, - {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"}, + {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, + {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, ] [[package]] name = "typed-ast" -version = "1.5.4" +version = "1.5.5" description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, - {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, - {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, - {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, - {file = 
"typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, - {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, - {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, - {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, - {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, - {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, - {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, + {file = "typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"}, + {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"}, + {file = 
"typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"}, + {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"}, + {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"}, + {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"}, + {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"}, + {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"}, + {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", 
hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"}, + {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"}, + {file = "typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"}, + {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"}, ] [[package]] name = "types-deprecated" -version = "1.2.9" +version = "1.2.9.3" description = "Typing stubs for Deprecated" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-Deprecated-1.2.9.tar.gz", hash = "sha256:e04ce58929509865359e91dcc38720123262b4cd68fa2a8a90312d50390bb6fa"}, - {file = "types_Deprecated-1.2.9-py3-none-any.whl", hash = "sha256:53d05621e1d75de537f5a57d93508c8df17e37c07ee60b9fb09d39e1b7586c1e"}, + {file = "types-Deprecated-1.2.9.3.tar.gz", hash = "sha256:ef87327adf3e3c4a4c7d8e06e58f6476710d3466ecfb53c49efb080804a70ef3"}, + {file = "types_Deprecated-1.2.9.3-py3-none-any.whl", hash = "sha256:24da9210763e5e1b3d0d4f6f8bba9ad3bb6af3fe7f6815fc37e3ede4681704f5"}, ] [[package]] name = "types-requests" -version = "2.28.11.7" +version = "2.31.0.2" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.28.11.7.tar.gz", hash = "sha256:0ae38633734990d019b80f5463dfa164ebd3581998ac8435f526da6fe4d598c3"}, - {file = "types_requests-2.28.11.7-py3-none-any.whl", hash = "sha256:b6a2fca8109f4fdba33052f11ed86102bddb2338519e1827387137fefc66a98b"}, + {file = "types-requests-2.31.0.2.tar.gz", hash = "sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40"}, + {file = "types_requests-2.31.0.2-py3-none-any.whl", hash = "sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a"}, ] [package.dependencies] -types-urllib3 = "<1.27" +types-urllib3 = "*" 
[[package]] name = "types-ujson" -version = "5.6.0.0" +version = "5.8.0.1" description = "Typing stubs for ujson" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-ujson-5.6.0.0.tar.gz", hash = "sha256:1a20cf7946772756736582612e0da5656d2dbeccd24be4c1e97d1e66b072b97e"}, - {file = "types_ujson-5.6.0.0-py3-none-any.whl", hash = "sha256:010b221260c24a915c6e713a83f366b91390766850ec110304de5b20c86b4b11"}, + {file = "types-ujson-5.8.0.1.tar.gz", hash = "sha256:2b14388248ab4cd1f5efa8c464761112597ccd57c0d84238f73631abe0e20cfd"}, + {file = "types_ujson-5.8.0.1-py3-none-any.whl", hash = "sha256:1923f373ba5df0eaa4e3fe5c85dbf4c0b475e2cce3f77f5f4b773347ea1a62c9"}, ] [[package]] name = "types-urllib3" -version = "1.26.25.4" +version = "1.26.25.14" description = "Typing stubs for urllib3" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"}, - {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"}, + {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, + {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, ] [[package]] name = "typing-extensions" -version = "4.4.0" +version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] name = "ujson" -version = "5.6.0" +version = "5.7.0" description = "Ultra fast JSON encoder and decoder for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "ujson-5.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b74396a655ac8a5299dcb765b4a17ba706e45c0df95818bcc6c13c4645a1c38e"}, - {file = "ujson-5.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f63535d51e039a984b2fb67ff87057ffe4216d4757c3cedf2fc846af88253cb7"}, - {file = "ujson-5.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4420bfff18ca6aa39cfb22fe35d8aba3811fa1190c4f4e1ad816b0aad72f7e3"}, - {file = "ujson-5.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35423460954d0c61602da734697724e8dd5326a8aa7900123e584b935116203e"}, - {file = "ujson-5.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:169b3fbd1188647c6ce00cb690915526aff86997c89a94c1b50432010ad7ae0f"}, - {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:91000612a2c30f50c6a009e6459a677e5c1972e51b59ecefd6063543dc47a4e9"}, - {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b72d4d948749e9c6afcd3d7af9ecc780fccde84e26d275c97273dd83c68a488b"}, - {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:aff708a1b9e2d4979f74375ade0bff978be72c8bd90422a756d24d8a46d78059"}, - {file = "ujson-5.6.0-cp310-cp310-win32.whl", hash = "sha256:6ea9024749a41864bffb12da15aace4a3193c03ea97e77b069557aefa342811f"}, - {file = "ujson-5.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:1217326ba80eab1ff3f644f9eee065bd4fcc4e0c068a2f86f851cafd05737169"}, - {file = "ujson-5.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bfb1fdf61763fafc0f8a20becf9cc4287c14fc41c0e14111d28c0d0dfda9ba56"}, - {file = "ujson-5.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fecf83b2ef3cbce4f5cc573df6f6ded565e5e27c1af84038bae5ade306686d82"}, - {file = "ujson-5.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213e41dc501b4a6d029873039da3e45ba7766b9f9eba97ecc4287c371f5403cc"}, - {file = "ujson-5.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad74eb53ee07e76c82f9ef8e7256c33873b81bd1f97a274fdb65ed87c2801f6"}, - {file = "ujson-5.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a68a204386648ec92ae9b526c1ffca528f38221eca70f98b4709390c3204275"}, - {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4be7d865cb5161824e12db71cee83290ab72b3523566371a30d6ba1bd63402a"}, - {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dde59d2f06297fc4e70b2bae6e4a6b3ce89ca89697ab2c41e641abae3be96b0c"}, - {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:551408a5c4306839b4a4f91503c96069204dbef2c7ed91a9dab08874ac1ed679"}, - {file = "ujson-5.6.0-cp311-cp311-win32.whl", hash = "sha256:ceee5aef3e234c7e998fdb52e5236c41e50cdedc116360f7f1874a04829f6490"}, - {file = "ujson-5.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd5ccc036b0f4721b98e1c03ccc604e7f3e1db53866ccc92b2add40ace1782f7"}, - {file = "ujson-5.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a66c5a75b46545361271b4cf55560d9ad8bad794dd054a14b3fbb031407948e"}, - {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d0a60c5f065737a81249c819475d001a86da9a41900d888287e34619c9b4851"}, - {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf04fcc958bb52a6b6c301b780cb9afab3ec68713b17ca5aa423e1f99c2c1cf"}, - {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24d40e01accbf4f0ba5181c4db1bac83749fdc1a5413466da582529f2a096085"}, - {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3f8b9e8c0420ce3dcc193ab6dd5628840ba79ad1b76e1816ac7ca6752c6bf035"}, - {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0f0f21157d1a84ad5fb54388f31767cde9c1a48fb29de7ef91d8887fdc2ca92b"}, - {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:82bf24ea72a73c7d77402a7adc954931243e7ec4241d5738ae74894b53944458"}, - {file = "ujson-5.6.0-cp37-cp37m-win32.whl", hash = "sha256:3b49a1014d396b962cb1d6c5f867f88b2c9aa9224c3860ee6ff63b2837a2965b"}, - {file = "ujson-5.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:74671d1bde8c03daeb92abdbd972960978347b1a1d432c4c1b3c9284ce4094cf"}, - {file = "ujson-5.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:72fa6e850831280a46704032721c75155fd41b839ddadabb6068ab218c56a37a"}, - {file = "ujson-5.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:57904e5b49ffe93189349229dcd83f73862ef9bb8517e8f1e62d0ff73f313847"}, - {file = 
"ujson-5.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61fdf24f7bddc402ce06b25e4bed7bf5ee4f03e23028a0a09116835c21d54888"}, - {file = "ujson-5.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7174e81c137d480abe2f8036e9fb69157e509f2db0bfdee4488eb61dc3f0ff6b"}, - {file = "ujson-5.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a7e4023c79d9a053c0c6b7c6ec50ea0af78381539ab27412e6af8d9410ae555"}, - {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31288f85db6295ec63e128daff7285bb0bc220935e1b5107bd2d67e2dc687b7e"}, - {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f3e651f04b7510fae7d4706a4600cd43457f015df08702ece82a71339fc15c3d"}, - {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:52f536712d16a1f4e0f9d084982c28e11b7e70c397a1059069e4d28d53b3f522"}, - {file = "ujson-5.6.0-cp38-cp38-win32.whl", hash = "sha256:23051f062bb257a87f3e55ea5a055ea98d56f08185fd415b34313268fa4d814e"}, - {file = "ujson-5.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:fb1632b27e12c0b0df62f924c362206daf246a42c0080e959dd465810dc3482e"}, - {file = "ujson-5.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f00dff3bf26bbb96791ceaf51ca95a3f34e2a21985748da855a650c38633b99"}, - {file = "ujson-5.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1b5e233e42f53bbbc6961caeb492986e9f3aeacd30be811467583203873bad2"}, - {file = "ujson-5.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51cbe614acb5ea8e2006e4fd80b4e8ea7c51ae51e42c75290012f4925a9d6ab"}, - {file = "ujson-5.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2aece7a92dffc9c78787f5f36e47e24b95495812270c27abc2fa430435a931d"}, - {file = "ujson-5.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20d929a27822cb79e034cc5e0bb62daa0257ab197247cb6f35d5149f2f438983"}, - {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7bde16cb18b95a8f68cc48715e4652b394b4fee68cb3f9fee0fd7d26b29a53b6"}, - {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bca3c06c3f10ce03fa80b1301dce53765815c2578a24bd141ce4e5769bb7b709"}, - {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e5715b0e2767b1987ceed0066980fc0a53421dd2f197b4f88460d474d6aef4c"}, - {file = "ujson-5.6.0-cp39-cp39-win32.whl", hash = "sha256:a8795de7ceadf84bcef88f947f91900d647eda234a2c6cc89912c25048cc0490"}, - {file = "ujson-5.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b9e9d26600020cf635a4e58763959f5a59f8c70f75d72ebf26ceae94c2efac74"}, - {file = "ujson-5.6.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:798116b88158f13ed687417526100ef353ba4692e0aef8afbc622bd4bf7e9057"}, - {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c169e12642f0edf1dde607fb264721b88787b55a6da5fb3824302a9cac6f9405"}, - {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d70b7f0b485f85141bbc518d0581ae96b912d9f8b070eaf68a9beef8eb1e60"}, - {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2cb7a4bd91de97b4c8e57fb5289d1e5f3f019723b59d01d79e2df83783dce5a6"}, - {file = "ujson-5.6.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ae723b8308ac17a591bb8be9478b58c2c26fada23fd2211fc323796801ad7ff5"}, - {file = 
"ujson-5.6.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2a24b9a96364f943a4754fa00b47855d0a01b84ac4b8b11ebf058c8fb68c1f77"}, - {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d2ac99503a9a5846157631addacc9f74e23f64d5a886fe910e9662660fa10"}, - {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fadebaddd3eb71a5c986f0bdc7bb28b072bfc585c141eef37474fc66d1830b0a"}, - {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f4efcac06f45183b6ed8e2321554739a964a02d8aa3089ec343253d86bf2804"}, - {file = "ujson-5.6.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e97af10b6f13a498de197fb852e9242064217c25dfca79ebe7ad0cf2b0dd0cb7"}, - {file = "ujson-5.6.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:355ef5311854936b9edc7f1ce638f8257cb45fb6b9873f6b2d16a715eafc9570"}, - {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4277f6b1d24be30b7f87ec5346a87693cbc1e55bbc5877f573381b2250c4dd6"}, - {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6f4be832d97836d62ac0c148026ec021f9f36481f38e455b51538fcd949ed2a"}, - {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bca074d08f0677f05df8170b25ce6e61db3bcdfda78062444972fa6508dc825f"}, - {file = "ujson-5.6.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:87578ccfc35461c77e73660fb7d89bc577732f671364f442bda9e2c58b571765"}, - {file = "ujson-5.6.0.tar.gz", hash = "sha256:f881e2d8a022e9285aa2eab6ba8674358dbcb2b57fa68618d88d62937ac3ff04"}, + {file = "ujson-5.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5eba5e69e4361ac3a311cf44fa71bc619361b6e0626768a494771aacd1c2f09b"}, + {file = "ujson-5.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aae4d9e1b4c7b61780f0a006c897a4a1904f862fdab1abb3ea8f45bd11aa58f3"}, + {file = "ujson-5.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2e43ccdba1cb5c6d3448eadf6fc0dae7be6c77e357a3abc968d1b44e265866d"}, + {file = "ujson-5.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54384ce4920a6d35fa9ea8e580bc6d359e3eb961fa7e43f46c78e3ed162d56ff"}, + {file = "ujson-5.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24ad1aa7fc4e4caa41d3d343512ce68e41411fb92adf7f434a4d4b3749dc8f58"}, + {file = "ujson-5.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:afff311e9f065a8f03c3753db7011bae7beb73a66189c7ea5fcb0456b7041ea4"}, + {file = "ujson-5.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e80f0d03e7e8646fc3d79ed2d875cebd4c83846e129737fdc4c2532dbd43d9e"}, + {file = "ujson-5.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:137831d8a0db302fb6828ee21c67ad63ac537bddc4376e1aab1c8573756ee21c"}, + {file = "ujson-5.7.0-cp310-cp310-win32.whl", hash = "sha256:7df3fd35ebc14dafeea031038a99232b32f53fa4c3ecddb8bed132a43eefb8ad"}, + {file = "ujson-5.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:af4639f684f425177d09ae409c07602c4096a6287027469157bfb6f83e01448b"}, + {file = "ujson-5.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b0f2680ce8a70f77f5d70aaf3f013d53e6af6d7058727a35d8ceb4a71cdd4e9"}, + {file = "ujson-5.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:67a19fd8e7d8cc58a169bea99fed5666023adf707a536d8f7b0a3c51dd498abf"}, + {file = "ujson-5.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6abb8e6d8f1ae72f0ed18287245f5b6d40094e2656d1eab6d99d666361514074"}, + {file = "ujson-5.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8cd622c069368d5074bd93817b31bdb02f8d818e57c29e206f10a1f9c6337dd"}, + {file = "ujson-5.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14f9082669f90e18e64792b3fd0bf19f2b15e7fe467534a35ea4b53f3bf4b755"}, + {file = "ujson-5.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7ff6ebb43bc81b057724e89550b13c9a30eda0f29c2f506f8b009895438f5a6"}, + {file = "ujson-5.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f7f241488879d91a136b299e0c4ce091996c684a53775e63bb442d1a8e9ae22a"}, + {file = "ujson-5.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5593263a7fcfb934107444bcfba9dde8145b282de0ee9f61e285e59a916dda0f"}, + {file = "ujson-5.7.0-cp311-cp311-win32.whl", hash = "sha256:26c2b32b489c393106e9cb68d0a02e1a7b9d05a07429d875c46b94ee8405bdb7"}, + {file = "ujson-5.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:ed24406454bb5a31df18f0a423ae14beb27b28cdfa34f6268e7ebddf23da807e"}, + {file = "ujson-5.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18679484e3bf9926342b1c43a3bd640f93a9eeeba19ef3d21993af7b0c44785d"}, + {file = "ujson-5.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee295761e1c6c30400641f0a20d381633d7622633cdf83a194f3c876a0e4b7e"}, + {file = "ujson-5.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b738282e12a05f400b291966630a98d622da0938caa4bc93cf65adb5f4281c60"}, + {file = "ujson-5.7.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00343501dbaa5172e78ef0e37f9ebd08040110e11c12420ff7c1f9f0332d939e"}, + {file = "ujson-5.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c0d1f7c3908357ee100aa64c4d1cf91edf99c40ac0069422a4fd5fd23b263263"}, + {file = "ujson-5.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a5d2f44331cf04689eafac7a6596c71d6657967c07ac700b0ae1c921178645da"}, + {file = "ujson-5.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:16b2254a77b310f118717715259a196662baa6b1f63b1a642d12ab1ff998c3d7"}, + {file = "ujson-5.7.0-cp37-cp37m-win32.whl", hash = "sha256:6faf46fa100b2b89e4db47206cf8a1ffb41542cdd34dde615b2fc2288954f194"}, + {file = "ujson-5.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ff0004c3f5a9a6574689a553d1b7819d1a496b4f005a7451f339dc2d9f4cf98c"}, + {file = "ujson-5.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:75204a1dd7ec6158c8db85a2f14a68d2143503f4bafb9a00b63fe09d35762a5e"}, + {file = "ujson-5.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7312731c7826e6c99cdd3ac503cd9acd300598e7a80bcf41f604fee5f49f566c"}, + {file = "ujson-5.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b9dc5a90e2149643df7f23634fe202fed5ebc787a2a1be95cf23632b4d90651"}, + {file = "ujson-5.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6a6961fc48821d84b1198a09516e396d56551e910d489692126e90bf4887d29"}, + {file = "ujson-5.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b01a9af52a0d5c46b2c68e3f258fdef2eacaa0ce6ae3e9eb97983f5b1166edb6"}, + {file = "ujson-5.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:b7316d3edeba8a403686cdcad4af737b8415493101e7462a70ff73dd0609eafc"}, + {file = "ujson-5.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ee997799a23227e2319a3f8817ce0b058923dbd31904761b788dc8f53bd3e30"}, + {file = "ujson-5.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dda9aa4c33435147262cd2ea87c6b7a1ca83ba9b3933ff7df34e69fee9fced0c"}, + {file = "ujson-5.7.0-cp38-cp38-win32.whl", hash = "sha256:bea8d30e362180aafecabbdcbe0e1f0b32c9fa9e39c38e4af037b9d3ca36f50c"}, + {file = "ujson-5.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:c96e3b872bf883090ddf32cc41957edf819c5336ab0007d0cf3854e61841726d"}, + {file = "ujson-5.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6411aea4c94a8e93c2baac096fbf697af35ba2b2ed410b8b360b3c0957a952d3"}, + {file = "ujson-5.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d3b3499c55911f70d4e074c626acdb79a56f54262c3c83325ffb210fb03e44d"}, + {file = "ujson-5.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:341f891d45dd3814d31764626c55d7ab3fd21af61fbc99d070e9c10c1190680b"}, + {file = "ujson-5.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f242eec917bafdc3f73a1021617db85f9958df80f267db69c76d766058f7b19"}, + {file = "ujson-5.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3af9f9f22a67a8c9466a32115d9073c72a33ae627b11de6f592df0ee09b98b6"}, + {file = "ujson-5.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a3d794afbf134df3056a813e5c8a935208cddeae975bd4bc0ef7e89c52f0ce0"}, + {file = "ujson-5.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:800bf998e78dae655008dd10b22ca8dc93bdcfcc82f620d754a411592da4bbf2"}, + {file = "ujson-5.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b5ac3d5c5825e30b438ea92845380e812a476d6c2a1872b76026f2e9d8060fc2"}, + {file = "ujson-5.7.0-cp39-cp39-win32.whl", hash = "sha256:cd90027e6d93e8982f7d0d23acf88c896d18deff1903dd96140613389b25c0dd"}, + {file = "ujson-5.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:523ee146cdb2122bbd827f4dcc2a8e66607b3f665186bce9e4f78c9710b6d8ab"}, + {file = "ujson-5.7.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e87cec407ec004cf1b04c0ed7219a68c12860123dfb8902ef880d3d87a71c172"}, + {file = "ujson-5.7.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bab10165db6a7994e67001733f7f2caf3400b3e11538409d8756bc9b1c64f7e8"}, + {file = "ujson-5.7.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b522be14a28e6ac1cf818599aeff1004a28b42df4ed4d7bc819887b9dac915fc"}, + {file = "ujson-5.7.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7592f40175c723c032cdbe9fe5165b3b5903604f774ab0849363386e99e1f253"}, + {file = "ujson-5.7.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ed22f9665327a981f288a4f758a432824dc0314e4195a0eaeb0da56a477da94d"}, + {file = "ujson-5.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:adf445a49d9a97a5a4c9bb1d652a1528de09dd1c48b29f79f3d66cea9f826bf6"}, + {file = "ujson-5.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64772a53f3c4b6122ed930ae145184ebaed38534c60f3d859d8c3f00911eb122"}, + {file = "ujson-5.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35209cb2c13fcb9d76d249286105b4897b75a5e7f0efb0c0f4b90f222ce48910"}, + {file = 
"ujson-5.7.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90712dfc775b2c7a07d4d8e059dd58636bd6ff1776d79857776152e693bddea6"}, + {file = "ujson-5.7.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0e4e8981c6e7e9e637e637ad8ffe948a09e5434bc5f52ecbb82b4b4cfc092bfb"}, + {file = "ujson-5.7.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:581c945b811a3d67c27566539bfcb9705ea09cb27c4be0002f7a553c8886b817"}, + {file = "ujson-5.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d36a807a24c7d44f71686685ae6fbc8793d784bca1adf4c89f5f780b835b6243"}, + {file = "ujson-5.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b4257307e3662aa65e2644a277ca68783c5d51190ed9c49efebdd3cbfd5fa44"}, + {file = "ujson-5.7.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea7423d8a2f9e160c5e011119741682414c5b8dce4ae56590a966316a07a4618"}, + {file = "ujson-5.7.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4c592eb91a5968058a561d358d0fef59099ed152cfb3e1cd14eee51a7a93879e"}, + {file = "ujson-5.7.0.tar.gz", hash = "sha256:e788e5d5dcae8f6118ac9b45d0b891a0d55f7ac480eddcb7f07263f2bcf37b23"}, ] [[package]] name = "urllib3" -version = "1.26.13" +version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, + {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, + {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, ] [package.extras] @@ -1462,43 +1533,40 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.17.1" +version = "20.21.1" description = "Virtual Python Environment builder" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "virtualenv-20.17.1-py3-none-any.whl", hash = "sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4"}, - {file = "virtualenv-20.17.1.tar.gz", hash = "sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058"}, + {file = "virtualenv-20.21.1-py3-none-any.whl", hash = "sha256:09ddbe1af0c8ed2bb4d6ed226b9e6415718ad18aef9fa0ba023d96b7a8356049"}, + {file = "virtualenv-20.21.1.tar.gz", hash = "sha256:4c104ccde994f8b108163cf9ba58f3d11511d9403de87fb9b4f52bf33dbc8668"}, ] [package.dependencies] distlib = ">=0.3.6,<1" filelock = ">=3.4.1,<4" importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.8\""} -platformdirs = ">=2.4,<3" +platformdirs = ">=2.4,<4" [package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] -testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.3.27)", "proselint 
(>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] [[package]] name = "wcwidth" -version = "0.2.5" +version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, ] [[package]] name = "websockets" version = "10.4" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1575,160 +1643,169 @@ files = [ [[package]] name = "wrapt" -version = "1.14.1" +version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = 
"wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = 
"wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", 
hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, + {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, + {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, + {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, + {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, + {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, + {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, + {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, + {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, + {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, + {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, + {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, + {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, + {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, + {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, + {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, + {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, + {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, + {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, + {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] [[package]] name = "yarl" -version = "1.8.2" +version = "1.9.2" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, - {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"}, - {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"}, - {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"}, - {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"}, - {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"}, - {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"}, - {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"}, - {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"}, - {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"}, - {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"}, - {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"}, - {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"}, - {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"}, - {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"}, - {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"}, - {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"}, - {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"}, - {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"}, - {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"}, - {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"}, - {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"}, - {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"}, - {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"}, - {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"}, - {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"}, - {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"}, - {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"}, - {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"}, - {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"}, - {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"}, - {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"}, - {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"}, - {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"}, - {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"}, - {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"}, - {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"}, - {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"}, - {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"}, - {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"}, - {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"}, - {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"}, - {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"}, - {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"}, - {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"}, - {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"}, - {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"}, - {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"}, - {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"}, - {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"}, - {file = "yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"}, - {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"}, - {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"}, - {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"}, - {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"}, - {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"}, - {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"}, - {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"}, - {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash 
= "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"}, - {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"}, - {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"}, - {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"}, - {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"}, - {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"}, - {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"}, - {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"}, - {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"}, - {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"}, - {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"}, - {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"}, - {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"}, - {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"}, - {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"}, - {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, + {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, + {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, + {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, + {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, + {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, + {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, + {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, + {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, + {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, + {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, + {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, + {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, ] [package.dependencies] @@ -1738,21 +1815,20 @@ typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [[package]] name = "zipp" -version = "3.11.0" +version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, - {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = 
"zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "2.0" python-versions = ">=3.7.2, <4.0" -content-hash = "08a9cab6af36721f5cf65834281b641f93e60978fc54340f222c833a88aa5bb3" +content-hash = "68b9e18f308a957906221e252919436a88f414a96bba50969afaa73293caa7ea" diff --git a/pyproject.toml b/pyproject.toml index 47cc2781..923d39f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,12 +15,13 @@ packages = [ [tool.poetry.dependencies] python = ">=3.7.2, <4.0" -pydantic = "^1.9" +pydantic = "^2.0" appdirs = "^1.4.4" gql = {extras = ["requests", "websockets"], version = "^3.3.0"} ujson = "^5.3.0" Deprecated = "^1.2.13" stringcase = "^1.2.0" +attrs = "^23.1.0" [tool.poetry.group.dev.dependencies] black = "^22.8.0" diff --git a/src/specklepy/api/client.py b/src/specklepy/api/client.py index 40207da3..ba63fab3 100644 --- a/src/specklepy/api/client.py +++ b/src/specklepy/api/client.py @@ -4,12 +4,14 @@ from deprecated import deprecated from gql import Client +from gql.transport.exceptions import TransportServerError from gql.transport.requests import RequestsHTTPTransport from gql.transport.websockets import WebsocketsTransport from specklepy.api import resources from specklepy.api.credentials import Account, get_account_from_token from specklepy.api.resources import ( + user, active_user, branch, commit, @@ -18,13 +20,14 @@ server, stream, subscriptions, - user, ) from specklepy.logging import metrics from specklepy.logging.exceptions import SpeckleException, SpeckleWarning +from specklepy.core.api.client import SpeckleClient as CoreSpeckleClient + -class SpeckleClient: +class SpeckleClient(CoreSpeckleClient): """ The `SpeckleClient` is your entry point for interacting with your Speckle Server's GraphQL API. @@ -59,121 +62,11 @@ class SpeckleClient: USE_SSL = True def __init__(self, host: str = DEFAULT_HOST, use_ssl: bool = USE_SSL) -> None: - metrics.track(metrics.CLIENT, custom_props={"name": "create"}) - ws_protocol = "ws" - http_protocol = "http" - - if use_ssl: - ws_protocol = "wss" - http_protocol = "https" - - # sanitise host input by removing protocol and trailing slash - host = re.sub(r"((^\w+:|^)\/\/)|(\/$)", "", host) - - self.url = f"{http_protocol}://{host}" - self.graphql = f"{self.url}/graphql" - self.ws_url = f"{ws_protocol}://{host}/graphql" - self.account = Account() - - self.httpclient = Client( - transport=RequestsHTTPTransport(url=self.graphql, verify=True, retries=3) - ) - self.wsclient = None - - self._init_resources() - - # ? Check compatibility with the server - i think we can skip this at this point? 
save a request - # try: - # server_info = self.server.get() - # if isinstance(server_info, Exception): - # raise server_info - # if not isinstance(server_info, ServerInfo): - # raise Exception("Couldn't get ServerInfo") - # except Exception as ex: - # raise SpeckleException( - # f"{self.url} is not a compatible Speckle Server", ex - # ) from ex - - def __repr__(self): - return ( - f"SpeckleClient( server: {self.url}, authenticated:" - f" {self.account.token is not None} )" + super().__init__( + host=host, + use_ssl=use_ssl, ) - - @deprecated( - version="2.6.0", - reason=( - "Renamed: please use `authenticate_with_account` or" - " `authenticate_with_token` instead." - ), - ) - def authenticate(self, token: str) -> None: - """Authenticate the client using a personal access token - The token is saved in the client object and a synchronous GraphQL - entrypoint is created - - Arguments: - token {str} -- an api token - """ - self.authenticate_with_token(token) - self._set_up_client() - - def authenticate_with_token(self, token: str) -> None: - """ - Authenticate the client using a personal access token. - The token is saved in the client object and a synchronous GraphQL - entrypoint is created - - Arguments: - token {str} -- an api token - """ - self.account = get_account_from_token(token, self.url) - metrics.track(metrics.CLIENT, self.account, {"name": "authenticate with token"}) - self._set_up_client() - - def authenticate_with_account(self, account: Account) -> None: - """Authenticate the client using an Account object - The account is saved in the client object and a synchronous GraphQL - entrypoint is created - - Arguments: - account {Account} -- the account object which can be found with - `get_default_account` or `get_local_accounts` - """ - metrics.track(metrics.CLIENT, account, {"name": "authenticate with account"}) - self.account = account - self._set_up_client() - - def _set_up_client(self) -> None: - metrics.track(metrics.CLIENT, self.account, {"name": "set up client"}) - headers = { - "Authorization": f"Bearer {self.account.token}", - "Content-Type": "application/json", - "apollographql-client-name": metrics.HOST_APP, - "apollographql-client-version": metrics.HOST_APP_VERSION, - } - httptransport = RequestsHTTPTransport( - url=self.graphql, headers=headers, verify=True, retries=3 - ) - wstransport = WebsocketsTransport( - url=self.ws_url, - init_payload={"Authorization": f"Bearer {self.account.token}"}, - ) - self.httpclient = Client(transport=httptransport) - self.wsclient = Client(transport=wstransport) - - self._init_resources() - - if self.user.get() is None: - warn( - SpeckleWarning( - "Possibly invalid token - could not authenticate Speckle Client" - f" for server {self.url}" - ) - ) - - def execute_query(self, query: str) -> Dict: - return self.httpclient.execute(query) + self.account = Account() def _init_resources(self) -> None: self.server = server.Resource( @@ -223,13 +116,44 @@ def _init_resources(self) -> None: client=self.wsclient, ) - def __getattr__(self, name): - try: - attr = getattr(resources, name) - return attr.Resource( - account=self.account, basepath=self.url, client=self.httpclient - ) - except AttributeError: - raise SpeckleException( - f"Method {name} is not supported by the SpeckleClient class" - ) + @deprecated( + version="2.6.0", + reason=( + "Renamed: please use `authenticate_with_account` or" + " `authenticate_with_token` instead." 
+ ), + ) + def authenticate(self, token: str) -> None: + """Authenticate the client using a personal access token + The token is saved in the client object and a synchronous GraphQL + entrypoint is created + + Arguments: + token {str} -- an api token + """ + metrics.track(metrics.SDK, self.account, {"name": "Client Authenticate_deprecated"}) + return super().authenticate(token) + + def authenticate_with_token(self, token: str) -> None: + """ + Authenticate the client using a personal access token. + The token is saved in the client object and a synchronous GraphQL + entrypoint is created + + Arguments: + token {str} -- an api token + """ + metrics.track(metrics.SDK, self.account, {"name": "Client Authenticate With Token"}) + return super().authenticate_with_token(token) + + def authenticate_with_account(self, account: Account) -> None: + """Authenticate the client using an Account object + The account is saved in the client object and a synchronous GraphQL + entrypoint is created + + Arguments: + account {Account} -- the account object which can be found with + `get_default_account` or `get_local_accounts` + """ + metrics.track(metrics.SDK, self.account, {"name": "Client Authenticate With Account"}) + return super().authenticate_with_account(account) diff --git a/src/specklepy/api/credentials.py b/src/specklepy/api/credentials.py index 9d504411..e43f6c2e 100644 --- a/src/specklepy/api/credentials.py +++ b/src/specklepy/api/credentials.py @@ -9,36 +9,12 @@ from specklepy.logging.exceptions import SpeckleException from specklepy.transports.sqlite import SQLiteTransport - -class UserInfo(BaseModel): - name: Optional[str] = None - email: Optional[str] = None - company: Optional[str] = None - id: Optional[str] = None - - -class Account(BaseModel): - isDefault: bool = False - token: Optional[str] = None - refreshToken: Optional[str] = None - serverInfo: ServerInfo = Field(default_factory=ServerInfo) - userInfo: UserInfo = Field(default_factory=UserInfo) - id: Optional[str] = None - - def __repr__(self) -> str: - return ( - f"Account(email: {self.userInfo.email}, server: {self.serverInfo.url}," - f" isDefault: {self.isDefault})" - ) - - def __str__(self) -> str: - return self.__repr__() - - @classmethod - def from_token(cls, token: str, server_url: str = None): - acct = cls(token=token) - acct.serverInfo.url = server_url - return acct +# following imports seem to be unnecessary, but they need to stay +# to not break the scripts using these functions as non-core +from specklepy.core.api.credentials import (Account, UserInfo, + StreamWrapper, # deprecated + get_local_accounts as core_get_local_accounts, + get_account_from_token as core_get_account_from_token) def get_local_accounts(base_path: Optional[str] = None) -> List[Account]: @@ -51,53 +27,19 @@ def get_local_accounts(base_path: Optional[str] = None) -> List[Account]: List[Account] -- list of all local accounts or an empty list if no accounts were found """ - accounts: List[Account] = [] - try: - account_storage = SQLiteTransport(scope="Accounts", base_path=base_path) - res = account_storage.get_all_objects() - account_storage.close() - if res: - accounts.extend(Account.parse_raw(r[1]) for r in res) - except SpeckleException: - # cannot open SQLiteTransport, probably because of the lack - # of disk write permissions - pass - - json_acct_files = [] - json_path = str(speckle_path_provider.accounts_folder_path()) - try: - os.makedirs(json_path, exist_ok=True) - json_acct_files.extend( - file for file in os.listdir(json_path) if 
file.endswith(".json") - ) - - except Exception: - # cannot find or get the json account paths - pass - - if json_acct_files: - try: - accounts.extend( - Account.parse_file(os.path.join(json_path, json_file)) - for json_file in json_acct_files - ) - except Exception as ex: - raise SpeckleException( - "Invalid json accounts could not be read. Please fix or remove them.", - ex, - ) from ex + accounts = core_get_local_accounts(base_path) metrics.track( - metrics.ACCOUNTS, + metrics.SDK, next( (acc for acc in accounts if acc.isDefault), accounts[0] if accounts else None, ), + {"name": "Get Local Accounts"} ) return accounts - def get_default_account(base_path: Optional[str] = None) -> Optional[Account]: """ Gets this environment's default account if any. If there is no default, @@ -108,7 +50,7 @@ def get_default_account(base_path: Optional[str] = None) -> Optional[Account]: Returns: Account -- the default account or None if no local accounts were found """ - accounts = get_local_accounts(base_path=base_path) + accounts = core_get_local_accounts(base_path=base_path) if not accounts: return None @@ -119,8 +61,7 @@ def get_default_account(base_path: Optional[str] = None) -> Optional[Account]: metrics.initialise_tracker(default) return default - - + def get_account_from_token(token: str, server_url: str = None) -> Account: """Gets the local account for the token if it exists Arguments: @@ -130,31 +71,7 @@ def get_account_from_token(token: str, server_url: str = None) -> Account: Account -- the local account with this token or a shell account containing just the token and url if no local account is found """ - accounts = get_local_accounts() - if not accounts: - return Account.from_token(token, server_url) - - acct = next((acc for acc in accounts if acc.token == token), None) - if acct: - return acct - - if server_url: - url = server_url.lower() - acct = next( - (acc for acc in accounts if url in acc.serverInfo.url.lower()), None - ) - if acct: - return acct - - return Account.from_token(token, server_url) - + account = core_get_account_from_token(token, server_url) -class StreamWrapper: - def __init__(self, url: str = None) -> None: - raise SpeckleException( - message=( - "The StreamWrapper has moved as of v2.6.0! 
Please import from" - " specklepy.api.wrapper" - ), - exception=DeprecationWarning(), - ) + metrics.track( metrics.SDK, account, {"name": "Get Account From Token"} ) + return account diff --git a/src/specklepy/api/host_applications.py b/src/specklepy/api/host_applications.py index 0a362832..8889e743 100644 --- a/src/specklepy/api/host_applications.py +++ b/src/specklepy/api/host_applications.py @@ -2,115 +2,17 @@ from enum import Enum from unicodedata import name - -class HostAppVersion(Enum): - v = "v" - v6 = "v6" - v7 = "v7" - v2019 = "v2019" - v2020 = "v2020" - v2021 = "v2021" - v2022 = "v2022" - v2023 = "v2023" - v2024 = "v2024" - v2025 = "v2025" - vSandbox = "vSandbox" - vRevit = "vRevit" - vRevit2021 = "vRevit2021" - vRevit2022 = "vRevit2022" - vRevit2023 = "vRevit2023" - vRevit2024 = "vRevit2024" - vRevit2025 = "vRevit2025" - v25 = "v25" - v26 = "v26" - - def __repr__(self) -> str: - return self.value - - def __str__(self) -> str: - return self.value - - -@dataclass -class HostApplication: - name: str - slug: str - - def get_version(self, version: HostAppVersion) -> str: - return f"{name.replace(' ', '')}{str(version).strip('v')}" - - -RHINO = HostApplication("Rhino", "rhino") -GRASSHOPPER = HostApplication("Grasshopper", "grasshopper") -REVIT = HostApplication("Revit", "revit") -DYNAMO = HostApplication("Dynamo", "dynamo") -UNITY = HostApplication("Unity", "unity") -GSA = HostApplication("GSA", "gsa") -CIVIL = HostApplication("Civil 3D", "civil3d") -AUTOCAD = HostApplication("AutoCAD", "autocad") -MICROSTATION = HostApplication("MicroStation", "microstation") -OPENROADS = HostApplication("OpenRoads", "openroads") -OPENRAIL = HostApplication("OpenRail", "openrail") -OPENBUILDINGS = HostApplication("OpenBuildings", "openbuildings") -ETABS = HostApplication("ETABS", "etabs") -SAP2000 = HostApplication("SAP2000", "sap2000") -CSIBRIDGE = HostApplication("CSIBridge", "csibridge") -SAFE = HostApplication("SAFE", "safe") -TEKLASTRUCTURES = HostApplication("Tekla Structures", "teklastructures") -DXF = HostApplication("DXF Converter", "dxf") -EXCEL = HostApplication("Excel", "excel") -UNREAL = HostApplication("Unreal", "unreal") -POWERBI = HostApplication("Power BI", "powerbi") -BLENDER = HostApplication("Blender", "blender") -QGIS = HostApplication("QGIS", "qgis") -ARCGIS = HostApplication("ArcGIS", "arcgis") -SKETCHUP = HostApplication("SketchUp", "sketchup") -ARCHICAD = HostApplication("Archicad", "archicad") -TOPSOLID = HostApplication("TopSolid", "topsolid") -PYTHON = HostApplication("Python", "python") -NET = HostApplication(".NET", "net") -OTHER = HostApplication("Other", "other") - -_app_name_host_app_mapping = { - "dynamo": DYNAMO, - "revit": REVIT, - "autocad": AUTOCAD, - "civil": CIVIL, - "rhino": RHINO, - "grasshopper": GRASSHOPPER, - "unity": UNITY, - "gsa": GSA, - "microstation": MICROSTATION, - "openroads": OPENROADS, - "openrail": OPENRAIL, - "openbuildings": OPENBUILDINGS, - "etabs": ETABS, - "sap": SAP2000, - "csibridge": CSIBRIDGE, - "safe": SAFE, - "teklastructures": TEKLASTRUCTURES, - "dxf": DXF, - "excel": EXCEL, - "unreal": UNREAL, - "powerbi": POWERBI, - "blender": BLENDER, - "qgis": QGIS, - "arcgis": ARCGIS, - "sketchup": SKETCHUP, - "archicad": ARCHICAD, - "topsolid": TOPSOLID, - "python": PYTHON, - "net": NET, -} - - -def get_host_app_from_string(app_name: str) -> HostApplication: - app_name = app_name.lower().replace(" ", "") - for partial_app_name, host_app in _app_name_host_app_mapping.items(): - if partial_app_name in app_name: - return host_app - return 
HostApplication(app_name, app_name) - +# following imports seem to be unnecessary, but they need to stay +# to not break the scripts using these functions as non-core +from specklepy.core.api.host_applications import (HostApplication, HostAppVersion, + get_host_app_from_string, + _app_name_host_app_mapping, + RHINO,GRASSHOPPER,REVIT,DYNAMO,UNITY,GSA, + CIVIL,AUTOCAD,MICROSTATION,OPENROADS, + OPENRAIL,OPENBUILDINGS,ETABS,SAP2000,CSIBRIDGE, + SAFE,TEKLASTRUCTURES,DXF,EXCEL,UNREAL,POWERBI, + BLENDER,QGIS,ARCGIS,SKETCHUP,ARCHICAD,TOPSOLID, + PYTHON,NET,OTHER) if __name__ == "__main__": print(HostAppVersion.v) diff --git a/src/specklepy/api/models.py b/src/specklepy/api/models.py index defec361..8640d21a 100644 --- a/src/specklepy/api/models.py +++ b/src/specklepy/api/models.py @@ -3,196 +3,10 @@ from pydantic import BaseModel, Field - -class Collaborator(BaseModel): - id: Optional[str] - name: Optional[str] - role: Optional[str] - avatar: Optional[str] - - -class Commit(BaseModel): - id: Optional[str] - message: Optional[str] - authorName: Optional[str] - authorId: Optional[str] - authorAvatar: Optional[str] - branchName: Optional[str] - createdAt: Optional[datetime] - sourceApplication: Optional[str] - referencedObject: Optional[str] - totalChildrenCount: Optional[int] - parents: Optional[List[str]] - - def __repr__(self) -> str: - return ( - f"Commit( id: {self.id}, message: {self.message}, referencedObject:" - f" {self.referencedObject}, authorName: {self.authorName}, branchName:" - f" {self.branchName}, createdAt: {self.createdAt} )" - ) - - def __str__(self) -> str: - return self.__repr__() - - -class Commits(BaseModel): - totalCount: Optional[int] - cursor: Optional[datetime] - items: List[Commit] = [] - - -class Object(BaseModel): - id: Optional[str] - speckleType: Optional[str] - applicationId: Optional[str] - totalChildrenCount: Optional[int] - createdAt: Optional[datetime] - - -class Branch(BaseModel): - id: Optional[str] - name: Optional[str] - description: Optional[str] - commits: Optional[Commits] - - -class Branches(BaseModel): - totalCount: Optional[int] - cursor: Optional[datetime] - items: List[Branch] = [] - - -class Stream(BaseModel): - id: Optional[str] = None - name: Optional[str] - role: Optional[str] = None - isPublic: Optional[bool] = None - description: Optional[str] = None - createdAt: Optional[datetime] = None - updatedAt: Optional[datetime] = None - collaborators: List[Collaborator] = Field(default_factory=list) - branches: Optional[Branches] = None - commit: Optional[Commit] = None - object: Optional[Object] = None - commentCount: Optional[int] = None - favoritedDate: Optional[datetime] = None - favoritesCount: Optional[int] = None - - def __repr__(self): - return ( - f"Stream( id: {self.id}, name: {self.name}, description:" - f" {self.description}, isPublic: {self.isPublic})" - ) - - def __str__(self) -> str: - return self.__repr__() - - -class Streams(BaseModel): - totalCount: Optional[int] - cursor: Optional[datetime] - items: List[Stream] = [] - - -class User(BaseModel): - id: Optional[str] - email: Optional[str] - name: Optional[str] - bio: Optional[str] - company: Optional[str] - avatar: Optional[str] - verified: Optional[bool] - role: Optional[str] - streams: Optional[Streams] - - def __repr__(self): - return ( - f"User( id: {self.id}, name: {self.name}, email: {self.email}, company:" - f" {self.company} )" - ) - - def __str__(self) -> str: - return self.__repr__() - - -class LimitedUser(BaseModel): - """Limited user type, for showing public info about a 
user to another user.""" - - id: str - name: Optional[str] - bio: Optional[str] - company: Optional[str] - avatar: Optional[str] - verified: Optional[bool] - role: Optional[str] - - -class PendingStreamCollaborator(BaseModel): - id: Optional[str] - inviteId: Optional[str] - streamId: Optional[str] - streamName: Optional[str] - title: Optional[str] - role: Optional[str] - invitedBy: Optional[User] - user: Optional[User] - token: Optional[str] - - def __repr__(self): - return ( - f"PendingStreamCollaborator( inviteId: {self.inviteId}, streamId:" - f" {self.streamId}, role: {self.role}, title: {self.title}, invitedBy:" - f" {self.user.name if self.user else None})" - ) - - def __str__(self) -> str: - return self.__repr__() - - -class Activity(BaseModel): - actionType: Optional[str] - info: Optional[dict] - userId: Optional[str] - streamId: Optional[str] - resourceId: Optional[str] - resourceType: Optional[str] - message: Optional[str] - time: Optional[datetime] - - def __repr__(self) -> str: - return ( - f"Activity( streamId: {self.streamId}, actionType: {self.actionType}," - f" message: {self.message}, userId: {self.userId} )" - ) - - def __str__(self) -> str: - return self.__repr__() - - -class ActivityCollection(BaseModel): - totalCount: Optional[int] - items: Optional[List[Activity]] - cursor: Optional[datetime] - - def __repr__(self) -> str: - return ( - f"ActivityCollection( totalCount: {self.totalCount}, items:" - f" {len(self.items) if self.items else 0}, cursor:" - f" {self.cursor.isoformat() if self.cursor else None} )" - ) - - def __str__(self) -> str: - return self.__repr__() - - -class ServerInfo(BaseModel): - name: Optional[str] = None - company: Optional[str] = None - url: Optional[str] = None - description: Optional[str] = None - adminContact: Optional[str] = None - canonicalUrl: Optional[str] = None - roles: Optional[List[dict]] = None - scopes: Optional[List[dict]] = None - authStrategies: Optional[List[dict]] = None - version: Optional[str] = None +# following imports seem to be unnecessary, but they need to stay +# to not break the scripts using these functions as non-core +from specklepy.core.api.models import (Collaborator, Commit, + Commits, Object, Branch, Branches, + Stream, Streams, User, LimitedUser, + PendingStreamCollaborator, Activity, + ActivityCollection, ServerInfo) diff --git a/src/specklepy/api/operations.py b/src/specklepy/api/operations.py index 600f0641..a86da776 100644 --- a/src/specklepy/api/operations.py +++ b/src/specklepy/api/operations.py @@ -7,6 +7,11 @@ from specklepy.transports.abstract_transport import AbstractTransport from specklepy.transports.sqlite import SQLiteTransport +from specklepy.core.api.operations import (send as core_send, + receive as _untracked_receive, + serialize as core_serialize, + deserialize as core_deserialize) + def send( base: Base, @@ -24,47 +29,18 @@ def send( Returns: str -- the object id of the sent object """ - - if not transports and not use_default_cache: - raise SpeckleException( - message=( - "You need to provide at least one transport: cannot send with an empty" - " transport list and no default cache" - ) - ) - - if isinstance(transports, AbstractTransport): - transports = [transports] - if transports is None: metrics.track(metrics.SEND) - transports = [] else: metrics.track(metrics.SEND, getattr(transports[0], "account", None)) - if use_default_cache: - transports.insert(0, SQLiteTransport()) - - serializer = BaseObjectSerializer(write_transports=transports) - - obj_hash, _ = serializer.write_json(base=base) 
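# --- Illustrative sketch, not part of the diff above ---------------------------
# Rough usage of the refactored operations module: per this hunk, the wrappers in
# src/specklepy/api/operations.py now only record metrics and forward to
# specklepy.core.api.operations. The import path of Base and the default local-cache
# behaviour of SQLiteTransport are assumptions carried over from earlier releases.
from specklepy.api import operations
from specklepy.objects import Base
from specklepy.transports.sqlite import SQLiteTransport

transport = SQLiteTransport()          # local cache transport
obj = Base()
obj.height = 10                        # Base accepts dynamic attributes

obj_id = operations.send(obj, transports=[transport])            # returns the object id
received = operations.receive(obj_id, local_transport=transport) # found in the local cache

as_json = operations.serialize(obj)                      # tracked as {"name": "Serialize"}
roundtrip = operations.deserialize(as_json, transport)   # tracked as {"name": "Deserialize"}
# --------------------------------------------------------------------------------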
- - return obj_hash + return core_send(base, transports, use_default_cache) def receive( obj_id: str, remote_transport: Optional[AbstractTransport] = None, local_transport: Optional[AbstractTransport] = None, -) -> Base: - metrics.track(metrics.RECEIVE, getattr(remote_transport, "account", None)) - return _untracked_receive(obj_id, remote_transport, local_transport) - - -def _untracked_receive( - obj_id: str, - remote_transport: Optional[AbstractTransport] = None, - local_transport: Optional[AbstractTransport] = None, ) -> Base: """Receives an object from a transport. @@ -77,29 +53,8 @@ def _untracked_receive( Returns: Base -- the base object """ - if not local_transport: - local_transport = SQLiteTransport() - - serializer = BaseObjectSerializer(read_transport=local_transport) - - # try local transport first. if the parent is there, we assume all the children are there and continue with deserialization using the local transport - obj_string = local_transport.get_object(obj_id) - if obj_string: - return serializer.read_json(obj_string=obj_string) - - if not remote_transport: - raise SpeckleException( - message=( - "Could not find the specified object using the local transport, and you" - " didn't provide a fallback remote from which to pull it." - ) - ) - - obj_string = remote_transport.copy_object_and_children( - id=obj_id, target_transport=local_transport - ) - - return serializer.read_json(obj_string=obj_string) + metrics.track(metrics.RECEIVE, getattr(remote_transport, "account", None)) + return _untracked_receive(obj_id, remote_transport, local_transport) def serialize(base: Base, write_transports: List[AbstractTransport] = []) -> str: @@ -116,11 +71,8 @@ def serialize(base: Base, write_transports: List[AbstractTransport] = []) -> str Returns: str -- the serialized object """ - metrics.track(metrics.SERIALIZE) - serializer = BaseObjectSerializer(write_transports=write_transports) - - return serializer.write_json(base)[1] - + metrics.track(metrics.SDK, custom_props={"name": "Serialize"}) + return core_serialize(base, write_transports) def deserialize( obj_string: str, read_transport: Optional[AbstractTransport] = None @@ -141,13 +93,8 @@ def deserialize( Returns: Base -- the deserialized object """ - metrics.track(metrics.DESERIALIZE) - if not read_transport: - read_transport = SQLiteTransport() - - serializer = BaseObjectSerializer(read_transport=read_transport) - - return serializer.read_json(obj_string=obj_string) + metrics.track(metrics.SDK, custom_props={"name": "Deserialize"}) + return core_deserialize(obj_string, read_transport) __all__ = ["receive", "send", "serialize", "deserialize"] diff --git a/src/specklepy/api/resource.py b/src/specklepy/api/resource.py index 47e24dfa..0727b44a 100644 --- a/src/specklepy/api/resource.py +++ b/src/specklepy/api/resource.py @@ -1,3 +1,4 @@ +from threading import Lock from typing import Any, Dict, List, Optional, Tuple, Type, Union from gql.client import Client @@ -13,8 +14,12 @@ from specklepy.serialization.base_object_serializer import BaseObjectSerializer from specklepy.transports.sqlite import SQLiteTransport +# following imports seem to be unnecessary, but they need to stay +# to not break the scripts using these functions as non-core +from specklepy.core.api.resource import ResourceBase as CoreResourceBase -class ResourceBase(object): + +class ResourceBase(CoreResourceBase): def __init__( self, account: Account, @@ -23,106 +28,11 @@ def __init__( name: str, server_version: Optional[Tuple[Any, ...]] = None, ) -> None: - self.account = 
account - self.basepath = basepath - self.client = client - self.name = name - self.server_version = server_version - self.schema: Optional[Type] = None - - def _step_into_response(self, response: dict, return_type: Union[str, List, None]): - """Step into the dict to get the relevant data""" - if return_type is None: - return response - if isinstance(return_type, str): - return response[return_type] - if isinstance(return_type, List): - for key in return_type: - response = response[key] - return response - - def _parse_response(self, response: Union[dict, list, None], schema=None): - """Try to create a class instance from the response""" - if response is None: - return None - if isinstance(response, list): - return [self._parse_response(response=r, schema=schema) for r in response] - if schema: - return schema.parse_obj(response) - elif self.schema: - try: - return self.schema.parse_obj(response) - except Exception: - s = BaseObjectSerializer(read_transport=SQLiteTransport()) - return s.recompose_base(response) - else: - return response - - def make_request( - self, - query: DocumentNode, - params: Optional[Dict] = None, - return_type: Union[str, List, None] = None, - schema=None, - parse_response: bool = True, - ) -> Any: - """Executes the GraphQL query""" - try: - response = self.client.execute(query, variable_values=params) - except Exception as ex: - if isinstance(ex, TransportQueryError): - return GraphQLException( - message=( - f"Failed to execute the GraphQL {self.name} request. Errors:" - f" {ex.errors}" - ), - errors=ex.errors, - data=ex.data, - ) - else: - return SpeckleException( - message=( - f"Failed to execute the GraphQL {self.name} request. Inner" - f" exception: {ex}" - ), - exception=ex, - ) - - response = self._step_into_response(response=response, return_type=return_type) - - if parse_response: - return self._parse_response(response=response, schema=schema) - else: - return response - - def _check_server_version_at_least( - self, target_version: Tuple[Any, ...], unsupported_message: Optional[str] = None - ): - """Use this check to guard against making unsupported requests on older servers. - - Arguments: - target_version {tuple} - the minimum server version in the format (major, minor, patch, (tag, build)) - eg (2, 6, 3) for a stable build and (2, 6, 4, 'alpha', 4711) for alpha - """ - if not unsupported_message: - unsupported_message = ( - "The client method used is not supported on Speckle Server versions" - f" prior to v{'.'.join(target_version)}" - ) - # if version is dev, it should be supported... (or not) - if self.server_version == ("dev",): - return - if self.server_version and self.server_version < target_version: - raise UnsupportedException(unsupported_message) - - def _check_invites_supported(self): - """Invites are only supported for Speckle Server >= 2.6.4. - Use this check to guard against making unsupported requests on older servers. - """ - self._check_server_version_at_least( - (2, 6, 4), - "Stream invites are only supported as of Speckle Server v2.6.4. 
Please" - " update your Speckle Server to use this method or use the" - " `grant_permission` flow instead.", + super().__init__( + account = account, + basepath = basepath, + client = client, + name = name, + server_version = server_version ) + \ No newline at end of file diff --git a/src/specklepy/api/resources/active_user.py b/src/specklepy/api/resources/active_user.py index 9f9128a6..20a2ef03 100644 --- a/src/specklepy/api/resources/active_user.py +++ b/src/specklepy/api/resources/active_user.py @@ -8,10 +8,10 @@ from specklepy.logging import metrics from specklepy.logging.exceptions import SpeckleException -NAME = "active_user" +from specklepy.core.api.resources.active_user import Resource as CoreResource -class Resource(ResourceBase): +class Resource(CoreResource): """API Access class for users""" def __init__(self, account, basepath, client, server_version) -> None: @@ -19,7 +19,6 @@ def __init__(self, account, basepath, client, server_version) -> None: account=account, basepath=basepath, client=client, - name=NAME, server_version=server_version, ) self.schema = User @@ -35,28 +34,8 @@ def get(self) -> User: Returns: User -- the retrieved user """ - metrics.track(metrics.USER, self.account, {"name": "get"}) - query = gql( - """ - query User { - activeUser { - id - email - name - bio - company - avatar - verified - profiles - role - } - } - """ - ) - - params = {} - - return self.make_request(query=query, params=params, return_type="activeUser") + metrics.track(metrics.SDK, custom_props={"name": "User Active Get"}) + return super().get() def update( self, @@ -76,28 +55,8 @@ def update( Returns @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT): bool -- True if your profile was updated successfully """ - metrics.track(metrics.USER, self.account, {"name": "update"}) - query = gql( - """ - mutation UserUpdate($user: UserUpdateInput!) 
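# --- Illustrative sketch, not part of the diff above ---------------------------
# How the thinned-out client and active-user wrappers are expected to be used: the
# public signatures are unchanged and each call now records an SDK metrics event
# before delegating to the specklepy.core implementation. The host value is a
# placeholder, and exposing the resource as `client.active_user` is assumed from
# earlier releases.
from specklepy.api.client import SpeckleClient
from specklepy.api.credentials import get_default_account

client = SpeckleClient(host="speckle.xyz")             # placeholder host
account = get_default_account()                        # None if no local account exists
if account:
    client.authenticate_with_account(account)          # "Client Authenticate With Account"
else:
    client.authenticate_with_token("<personal-access-token>")

me = client.active_user.get()                                    # "User Active Get"
client.active_user.update(bio="hello", company="Example Ltd")    # "User Active Update"
invites = client.active_user.get_all_pending_invites()           # "User Active Invites All Get"
# --------------------------------------------------------------------------------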
{ - userUpdate(user: $user) - } - """ - ) - params = {"name": name, "company": company, "bio": bio, "avatar": avatar} - - params = {"user": {k: v for k, v in params.items() if v is not None}} - - if not params["user"]: - return SpeckleException( - message=( - "You must provide at least one field to update your user profile" - ) - ) - - return self.make_request( - query=query, params=params, return_type="userUpdate", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "User Active Update"}) + return super().update(name, company, bio, avatar) def activity( self, @@ -126,56 +85,8 @@ def activity( (ie: return all activity _after_ this time) cursor {datetime} -- timestamp cursor for pagination """ - - query = gql( - """ - query UserActivity( - $action_type: String, - $before:DateTime, - $after: DateTime, - $cursor: DateTime, - $limit: Int - ){ - activeUser { - activity( - actionType: $action_type, - before: $before, - after: $after, - cursor: $cursor, - limit: $limit - ) { - totalCount - cursor - items { - actionType - info - userId - streamId - resourceId - resourceType - message - time - } - } - } - } - """ - ) - - params = { - "limit": limit, - "action_type": action_type, - "before": before.astimezone(timezone.utc).isoformat() if before else before, - "after": after.astimezone(timezone.utc).isoformat() if after else after, - "cursor": cursor.astimezone(timezone.utc).isoformat() if cursor else cursor, - } - - return self.make_request( - query=query, - params=params, - return_type=["activeUser", "activity"], - schema=ActivityCollection, - ) + metrics.track(metrics.SDK, self.account, {"name": "User Active Activity"}) + return super().activity(limit, action_type, before, after, cursor) def get_all_pending_invites(self) -> List[PendingStreamCollaborator]: """Get all of the active user's pending stream invites @@ -186,36 +97,8 @@ def get_all_pending_invites(self) -> List[PendingStreamCollaborator]: List[PendingStreamCollaborator] -- a list of pending invites for the current user """ - metrics.track(metrics.INVITE, self.account, {"name": "get"}) - self._check_invites_supported() - - query = gql( - """ - query StreamInvites { - streamInvites{ - id - token - inviteId - streamId - streamName - title - role - invitedBy { - id - name - company - avatar - } - } - } - """ - ) - - return self.make_request( - query=query, - return_type="streamInvites", - schema=PendingStreamCollaborator, - ) + metrics.track(metrics.SDK, self.account, {"name": "User Active Invites All Get"}) + return super().get_all_pending_invites() def get_pending_invite( self, stream_id: str, token: Optional[str] = None @@ -233,37 +116,5 @@ def get_pending_invite( PendingStreamCollaborator -- the invite for the given stream (or None if it isn't found) """ - metrics.track(metrics.INVITE, self.account, {"name": "get"}) - self._check_invites_supported() - - query = gql( - """ - query StreamInvite($streamId: String!, $token: String) { - streamInvite(streamId: $streamId, token: $token) { - id - token - streamId - streamName - title - role - invitedBy { - id - name - company - avatar - } - } - } - """ - ) - - params = {"streamId": stream_id} - if token: - params["token"] = token - - return self.make_request( - query=query, - params=params, - return_type="streamInvite", - schema=PendingStreamCollaborator, - ) + metrics.track(metrics.SDK, self.account, {"name": "User Active Invite Get"}) + return super().get_pending_invite(stream_id, token) diff --git a/src/specklepy/api/resources/branch.py 
b/src/specklepy/api/resources/branch.py index a861c871..11b5022b 100644 --- a/src/specklepy/api/resources/branch.py +++ b/src/specklepy/api/resources/branch.py @@ -7,10 +7,10 @@ from specklepy.logging import metrics from specklepy.logging.exceptions import SpeckleException -NAME = "branch" +from specklepy.core.api.resources.branch import Resource as CoreResource -class Resource(ResourceBase): +class Resource(CoreResource): """API Access class for branches""" def __init__(self, account, basepath, client) -> None: @@ -18,7 +18,6 @@ def __init__(self, account, basepath, client) -> None: account=account, basepath=basepath, client=client, - name=NAME, ) self.schema = Branch @@ -34,29 +33,8 @@ def create( Returns: id {str} -- the newly created branch's id """ - metrics.track(metrics.BRANCH, self.account, {"name": "create"}) - query = gql( - """ - mutation BranchCreate($branch: BranchCreateInput!) { - branchCreate(branch: $branch) - } - """ - ) - if len(name) < 3: - return SpeckleException( - message="Branch Name must be at least 3 characters" - ) - params = { - "branch": { - "streamId": stream_id, - "name": name, - "description": description, - } - } - - return self.make_request( - query=query, params=params, return_type="branchCreate", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "Branch Create"}) + return super().create(stream_id, name, description) def get(self, stream_id: str, name: str, commits_limit: int = 10): """Get a branch by name from a stream @@ -69,42 +47,8 @@ def get(self, stream_id: str, name: str, commits_limit: int = 10): Returns: Branch -- the fetched branch with its latest commits """ - metrics.track(metrics.BRANCH, self.account, {"name": "get"}) - query = gql( - """ - query BranchGet($stream_id: String!, $name: String!, $commits_limit: Int!) { - stream(id: $stream_id) { - branch(name: $name) { - id, - name, - description, - commits (limit: $commits_limit) { - totalCount, - cursor, - items { - id, - referencedObject, - sourceApplication, - totalChildrenCount, - message, - authorName, - authorId, - branchName, - parents, - createdAt - } - } - } - } - } - """ - ) - - params = {"stream_id": stream_id, "name": name, "commits_limit": commits_limit} - - return self.make_request( - query=query, params=params, return_type=["stream", "branch"] - ) + metrics.track(metrics.SDK, self.account, {"name": "Branch Get"}) + return super().get(stream_id, name, commits_limit) def list(self, stream_id: str, branches_limit: int = 10, commits_limit: int = 10): """Get a list of branches from a given stream @@ -117,50 +61,8 @@ def list(self, stream_id: str, branches_limit: int = 10, commits_limit: int = 10 Returns: List[Branch] -- the branches on the stream """ - metrics.track(metrics.BRANCH, self.account, {"name": "get"}) - query = gql( - """ - query BranchesGet( - $stream_id: String!, - $branches_limit: Int!, - $commits_limit: Int! 
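# --- Illustrative sketch, not part of the diff above ---------------------------
# The branch resource after the refactor: each method keeps its signature, records
# a "Branch ..." metrics event and forwards to the core resource. The client setup
# and stream id are placeholders; authentication is elided (see the earlier sketch).
from specklepy.api.client import SpeckleClient

client = SpeckleClient(host="speckle.xyz")   # placeholder host, assumed authenticated
stream_id = "<stream-id>"                    # placeholder

branch_id = client.branch.create(stream_id, name="dev", description="work in progress")
dev = client.branch.get(stream_id, "dev", commits_limit=5)
branches = client.branch.list(stream_id, branches_limit=10, commits_limit=5)
client.branch.update(stream_id, branch_id, description="ready for review")
client.branch.delete(stream_id, branch_id)
# --------------------------------------------------------------------------------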
- ) { - stream(id: $stream_id) { - branches(limit: $branches_limit) { - items { - id - name - description - commits(limit: $commits_limit) { - totalCount - items{ - id - message - referencedObject - sourceApplication - parents - authorId - authorName - branchName - createdAt - } - } - } - } - } - } - """ - ) - - params = { - "stream_id": stream_id, - "branches_limit": branches_limit, - "commits_limit": commits_limit, - } - - return self.make_request( - query=query, params=params, return_type=["stream", "branches", "items"] - ) + metrics.track(metrics.SDK, self.account, {"name": "Branch List"}) + return super().list(stream_id, branches_limit, commits_limit) def update( self, @@ -180,29 +82,8 @@ def update( Returns: bool -- True if update is successful """ - metrics.track(metrics.BRANCH, self.account, {"name": "update"}) - query = gql( - """ - mutation BranchUpdate($branch: BranchUpdateInput!) { - branchUpdate(branch: $branch) - } - """ - ) - params = { - "branch": { - "streamId": stream_id, - "id": branch_id, - } - } - - if name: - params["branch"]["name"] = name - if description: - params["branch"]["description"] = description - - return self.make_request( - query=query, params=params, return_type="branchUpdate", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "Branch Update"}) + return super().update(stream_id, branch_id, name, description) def delete(self, stream_id: str, branch_id: str): """Delete a branch @@ -214,17 +95,5 @@ def delete(self, stream_id: str, branch_id: str): Returns: bool -- True if deletion is successful """ - metrics.track(metrics.BRANCH, self.account, {"name": "delete"}) - query = gql( - """ - mutation BranchDelete($branch: BranchDeleteInput!) { - branchDelete(branch: $branch) - } - """ - ) - - params = {"branch": {"streamId": stream_id, "id": branch_id}} - - return self.make_request( - query=query, params=params, return_type="branchDelete", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "Branch Delete"}) + return super().delete(stream_id, branch_id) diff --git a/src/specklepy/api/resources/commit.py b/src/specklepy/api/resources/commit.py index fa190253..0f69bf75 100644 --- a/src/specklepy/api/resources/commit.py +++ b/src/specklepy/api/resources/commit.py @@ -6,10 +6,10 @@ from specklepy.api.resource import ResourceBase from specklepy.logging import metrics -NAME = "commit" +from specklepy.core.api.resources.commit import Resource as CoreResource -class Resource(ResourceBase): +class Resource(CoreResource): """API Access class for commits""" def __init__(self, account, basepath, client) -> None: @@ -17,7 +17,6 @@ def __init__(self, account, basepath, client) -> None: account=account, basepath=basepath, client=client, - name=NAME, ) self.schema = Commit @@ -32,32 +31,8 @@ def get(self, stream_id: str, commit_id: str) -> Commit: Returns: Commit -- the retrieved commit object """ - query = gql( - """ - query Commit($stream_id: String!, $commit_id: String!) 
{ - stream(id: $stream_id) { - commit(id: $commit_id) { - id - message - referencedObject - authorId - authorName - authorAvatar - branchName - createdAt - sourceApplication - totalChildrenCount - parents - } - } - } - """ - ) - params = {"stream_id": stream_id, "commit_id": commit_id} - - return self.make_request( - query=query, params=params, return_type=["stream", "commit"] - ) + metrics.track(metrics.SDK, self.account, {"name": "Commit Get"}) + return super().get(stream_id, commit_id) def list(self, stream_id: str, limit: int = 10) -> List[Commit]: """ @@ -70,36 +45,8 @@ def list(self, stream_id: str, limit: int = 10) -> List[Commit]: Returns: List[Commit] -- a list of the most recent commit objects """ - metrics.track(metrics.COMMIT, self.account, {"name": "get"}) - query = gql( - """ - query Commits($stream_id: String!, $limit: Int!) { - stream(id: $stream_id) { - commits(limit: $limit) { - items { - id - message - referencedObject - authorName - authorId - authorName - authorAvatar - branchName - createdAt - sourceApplication - totalChildrenCount - parents - } - } - } - } - """ - ) - params = {"stream_id": stream_id, "limit": limit} - - return self.make_request( - query=query, params=params, return_type=["stream", "commits", "items"] - ) + metrics.track(metrics.SDK, self.account, {"name": "Commit List"}) + return super().list(stream_id, limit) def create( self, @@ -128,28 +75,8 @@ def create( Returns: str -- the id of the created commit """ - metrics.track(metrics.COMMIT, self.account, {"name": "create"}) - query = gql( - """ - mutation CommitCreate ($commit: CommitCreateInput!) - { commitCreate(commit: $commit)} - """ - ) - params = { - "commit": { - "streamId": stream_id, - "branchName": branch_name, - "objectId": object_id, - "message": message, - "sourceApplication": source_application, - } - } - if parents: - params["commit"]["parents"] = parents - - return self.make_request( - query=query, params=params, return_type="commitCreate", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "Commit Create"}) + return super().create(stream_id, object_id, branch_name, message, source_application, parents) def update(self, stream_id: str, commit_id: str, message: str) -> bool: """ @@ -164,20 +91,8 @@ def update(self, stream_id: str, commit_id: str, message: str) -> bool: Returns: bool -- True if the operation succeeded """ - metrics.track(metrics.COMMIT, self.account, {"name": "update"}) - query = gql( - """ - mutation CommitUpdate($commit: CommitUpdateInput!) - { commitUpdate(commit: $commit)} - """ - ) - params = { - "commit": {"streamId": stream_id, "id": commit_id, "message": message} - } - - return self.make_request( - query=query, params=params, return_type="commitUpdate", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "Commit Update"}) + return super().update(stream_id, commit_id, message) def delete(self, stream_id: str, commit_id: str) -> bool: """ @@ -191,18 +106,8 @@ def delete(self, stream_id: str, commit_id: str) -> bool: Returns: bool -- True if the operation succeeded """ - metrics.track(metrics.COMMIT, self.account, {"name": "delete"}) - query = gql( - """ - mutation CommitDelete($commit: CommitDeleteInput!) 
- { commitDelete(commit: $commit)} - """ - ) - params = {"commit": {"streamId": stream_id, "id": commit_id}} - - return self.make_request( - query=query, params=params, return_type="commitDelete", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "Commit Delete"}) + return super().delete(stream_id, commit_id) def received( self, @@ -214,30 +119,5 @@ def received( """ Mark a commit object a received by the source application. """ - metrics.track(metrics.COMMIT, self.account, {"name": "received"}) - query = gql( - """ - mutation CommitReceive($receivedInput:CommitReceivedInput!){ - commitReceive(input:$receivedInput) - } - """ - ) - params = { - "receivedInput": { - "sourceApplication": source_application, - "streamId": stream_id, - "commitId": commit_id, - "message": "message", - } - } - - try: - return self.make_request( - query=query, - params=params, - return_type="commitReceive", - parse_response=False, - ) - except Exception as ex: - print(ex.with_traceback) - return False + metrics.track(metrics.SDK, self.account, {"name": "Commit Received"}) + return super().received(stream_id, commit_id, source_application, message) diff --git a/src/specklepy/api/resources/object.py b/src/specklepy/api/resources/object.py index 70e116b5..3cc5dca2 100644 --- a/src/specklepy/api/resources/object.py +++ b/src/specklepy/api/resources/object.py @@ -5,10 +5,12 @@ from specklepy.api.resource import ResourceBase from specklepy.objects.base import Base -NAME = "object" +from specklepy.logging import metrics +from specklepy.core.api.resources.object import Resource as CoreResource -class Resource(ResourceBase): + +class Resource(CoreResource): """API Access class for objects""" def __init__(self, account, basepath, client) -> None: @@ -16,7 +18,6 @@ def __init__(self, account, basepath, client) -> None: account=account, basepath=basepath, client=client, - name=NAME, ) self.schema = Base @@ -31,31 +32,8 @@ def get(self, stream_id: str, object_id: str) -> Base: Returns: Base -- the returned Base object """ - query = gql( - """ - query Object($stream_id: String!, $object_id: String!) { - stream(id: $stream_id) { - id - name - object(id: $object_id) { - id - speckleType - applicationId - createdAt - totalChildrenCount - data - } - } - } - """ - ) - params = {"stream_id": stream_id, "object_id": object_id} - - return self.make_request( - query=query, - params=params, - return_type=["stream", "object", "data"], - ) + metrics.track(metrics.SDK, self.account, {"name": "Object Get"}) + return super().get(stream_id, object_id) def create(self, stream_id: str, objects: List[Dict]) -> str: """ @@ -78,15 +56,6 @@ def create(self, stream_id: str, objects: List[Dict]) -> str: Returns: str -- the id of the object """ - query = gql( - """ - mutation ObjectCreate($object_input: ObjectCreateInput!) 
{ - objectCreate(objectInput: $object_input) - } - """ - ) - params = {"object_input": {"streamId": stream_id, "objects": objects}} - - return self.make_request( - query=query, params=params, return_type="objectCreate", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "Object Create"}) + return super().create(stream_id, objects) + \ No newline at end of file diff --git a/src/specklepy/api/resources/other_user.py b/src/specklepy/api/resources/other_user.py index a95f3700..b277b0a4 100644 --- a/src/specklepy/api/resources/other_user.py +++ b/src/specklepy/api/resources/other_user.py @@ -8,10 +8,10 @@ from specklepy.logging import metrics from specklepy.logging.exceptions import SpeckleException -NAME = "other_user" +from specklepy.core.api.resources.other_user import Resource as CoreResource -class Resource(ResourceBase): +class Resource(CoreResource): """API Access class for other users, that are not the currently active user.""" def __init__(self, account, basepath, client, server_version) -> None: @@ -19,7 +19,6 @@ def __init__(self, account, basepath, client, server_version) -> None: account=account, basepath=basepath, client=client, - name=NAME, server_version=server_version, ) self.schema = LimitedUser @@ -34,26 +33,8 @@ def get(self, id: str) -> LimitedUser: Returns: LimitedUser -- the retrieved profile of another user """ - metrics.track(metrics.OTHER_USER, self.account, {"name": "get"}) - query = gql( - """ - query OtherUser($id: String!) { - otherUser(id: $id) { - id - name - bio - company - avatar - verified - role - } - } - """ - ) - - params = {"id": id} - - return self.make_request(query=query, params=params, return_type="otherUser") + metrics.track(metrics.SDK, self.account, {"name": "Other User Get"}) + return super().get(id) def search( self, search_query: str, limit: int = 25 @@ -72,28 +53,8 @@ def search( message="User search query must be at least 3 characters" ) - metrics.track(metrics.OTHER_USER, self.account, {"name": "search"}) - query = gql( - """ - query UserSearch($search_query: String!, $limit: Int!) 
{ - userSearch(query: $search_query, limit: $limit) { - items { - id - name - bio - company - avatar - verified - } - } - } - """ - ) - params = {"search_query": search_query, "limit": limit} - - return self.make_request( - query=query, params=params, return_type=["userSearch", "items"] - ) + metrics.track(metrics.SDK, self.account, {"name": "Other User Search"}) + return super().search(search_query, limit) def activity( self, @@ -121,55 +82,6 @@ def activity( (ie: return all activity _after_ this time) cursor {datetime} -- timestamp cursor for pagination """ - - query = gql( - """ - query UserActivity( - $user_id: String!, - $action_type: String, - $before:DateTime, - $after: DateTime, - $cursor: DateTime, - $limit: Int - ){ - otherUser(id: $user_id) { - activity( - actionType: $action_type, - before: $before, - after: $after, - cursor: $cursor, - limit: $limit - ) { - totalCount - cursor - items { - actionType - info - userId - streamId - resourceId - resourceType - message - time - } - } - } - } - """ - ) - - params = { - "user_id": user_id, - "limit": limit, - "action_type": action_type, - "before": before.astimezone(timezone.utc).isoformat() if before else before, - "after": after.astimezone(timezone.utc).isoformat() if after else after, - "cursor": cursor.astimezone(timezone.utc).isoformat() if cursor else cursor, - } - - return self.make_request( - query=query, - params=params, - return_type=["otherUser", "activity"], - schema=ActivityCollection, - ) + metrics.track(metrics.SDK, self.account, {"name": "Other User Activity"}) + return super().activity(user_id, limit, action_type, before, after, cursor) + \ No newline at end of file diff --git a/src/specklepy/api/resources/server.py b/src/specklepy/api/resources/server.py index c7566cc4..1af78d08 100644 --- a/src/specklepy/api/resources/server.py +++ b/src/specklepy/api/resources/server.py @@ -8,10 +8,10 @@ from specklepy.logging import metrics from specklepy.logging.exceptions import GraphQLException -NAME = "server" +from specklepy.core.api.resources.server import Resource as CoreResource -class Resource(ResourceBase): +class Resource(CoreResource): """API Access class for the server""" def __init__(self, account, basepath, client) -> None: @@ -19,7 +19,6 @@ def __init__(self, account, basepath, client) -> None: account=account, basepath=basepath, client=client, - name=NAME, ) def get(self) -> ServerInfo: @@ -28,39 +27,8 @@ def get(self) -> ServerInfo: Returns: dict -- the server info in dictionary form """ - metrics.track(metrics.SERVER, self.account, {"name": "get"}) - query = gql( - """ - query Server { - serverInfo { - name - company - description - adminContact - canonicalUrl - version - roles { - name - description - resourceTarget - } - scopes { - name - description - } - authStrategies{ - id - name - icon - } - } - } - """ - ) - - return self.make_request( - query=query, return_type="serverInfo", schema=ServerInfo - ) + metrics.track(metrics.SDK, self.account, {"name": "Server Get"}) + return super().get() def version(self) -> Tuple[Any, ...]: """Get the server version @@ -70,30 +38,7 @@ def version(self) -> Tuple[Any, ...]: eg (2, 6, 3) for a stable build and (2, 6, 4, 'alpha', 4711) for alpha """ # not tracking as it will be called along with other mutations / queries as a check - query = gql( - """ - query Server { - serverInfo { - version - } - } - """ - ) - ver = self.make_request( - query=query, return_type=["serverInfo", "version"], parse_response=False - ) - if isinstance(ver, Exception): - raise GraphQLException( - 
f"Could not get server version for {self.basepath}", [ver] - ) - - # pylint: disable=consider-using-generator; (list comp is faster) - return tuple( - [ - int(segment) if segment.isdigit() else segment - for segment in re.split(r"\.|-", ver) - ] - ) + return super().version() def apps(self) -> Dict: """Get the apps registered on the server @@ -101,28 +46,8 @@ def apps(self) -> Dict: Returns: dict -- a dictionary of apps registered on the server """ - metrics.track(metrics.SERVER, self.account, {"name": "apps"}) - query = gql( - """ - query Apps { - apps{ - id - name - description - termsAndConditionsLink - trustByDefault - logo - author { - id - name - avatar - } - } - } - """ - ) - - return self.make_request(query=query, return_type="apps", parse_response=False) + metrics.track(metrics.SDK, self.account, {"name": "Server Apps"}) + return super().apps() def create_token(self, name: str, scopes: List[str], lifespan: int) -> str: """Create a personal API token @@ -135,22 +60,8 @@ def create_token(self, name: str, scopes: List[str], lifespan: int) -> str: Returns: str -- the new API token. note: this is the only time you'll see the token! """ - metrics.track(metrics.SERVER, self.account, {"name": "create_token"}) - query = gql( - """ - mutation TokenCreate($token: ApiTokenCreateInput!) { - apiTokenCreate(token: $token) - } - """ - ) - params = {"token": {"scopes": scopes, "name": name, "lifespan": lifespan}} - - return self.make_request( - query=query, - params=params, - return_type="apiTokenCreate", - parse_response=False, - ) + metrics.track(metrics.SDK, self.account, {"name": "Server Create Token"}) + return super().create_token(name, scopes, lifespan) def revoke_token(self, token: str) -> bool: """Revokes (deletes) a personal API token @@ -161,19 +72,5 @@ def revoke_token(self, token: str) -> bool: Returns: bool -- True if the token was successfully deleted """ - metrics.track(metrics.SERVER, self.account, {"name": "revoke_token"}) - query = gql( - """ - mutation TokenRevoke($token: String!) { - apiTokenRevoke(token: $token) - } - """ - ) - params = {"token": token} - - return self.make_request( - query=query, - params=params, - return_type="apiTokenRevoke", - parse_response=False, - ) + metrics.track(metrics.SDK, self.account, {"name": "Server Revoke Token"}) + return super().revoke_token(token) diff --git a/src/specklepy/api/resources/stream.py b/src/specklepy/api/resources/stream.py index 0eff18f8..26b2a6cb 100644 --- a/src/specklepy/api/resources/stream.py +++ b/src/specklepy/api/resources/stream.py @@ -9,10 +9,10 @@ from specklepy.logging import metrics from specklepy.logging.exceptions import SpeckleException, UnsupportedException -NAME = "stream" +from specklepy.core.api.resources.stream import Resource as CoreResource -class Resource(ResourceBase): +class Resource(CoreResource): """API Access class for streams""" def __init__(self, account, basepath, client, server_version) -> None: @@ -20,7 +20,6 @@ def __init__(self, account, basepath, client, server_version) -> None: account=account, basepath=basepath, client=client, - name=NAME, server_version=server_version, ) @@ -37,56 +36,8 @@ def get(self, id: str, branch_limit: int = 10, commit_limit: int = 10) -> Stream Returns: Stream -- the retrieved stream """ - metrics.track(metrics.STREAM, self.account, {"name": "get"}) - query = gql( - """ - query Stream($id: String!, $branch_limit: Int!, $commit_limit: Int!) 
{ - stream(id: $id) { - id - name - role - description - isPublic - createdAt - updatedAt - commentCount - favoritesCount - collaborators { - id - name - role - avatar - } - branches(limit: $branch_limit) { - totalCount - cursor - items { - id - name - description - commits(limit: $commit_limit) { - totalCount - cursor - items { - id - message - authorId - createdAt - authorName - referencedObject - sourceApplication - } - } - } - } - } - } - """ - ) - - params = {"id": id, "branch_limit": branch_limit, "commit_limit": commit_limit} - - return self.make_request(query=query, params=params, return_type="stream") + metrics.track(metrics.SDK, self.account, {"name": "Stream Get"}) + return super().get(id, branch_limit, commit_limit) def list(self, stream_limit: int = 10) -> List[Stream]: """Get a list of the user's streams @@ -97,50 +48,8 @@ def list(self, stream_limit: int = 10) -> List[Stream]: Returns: List[Stream] -- A list of Stream objects """ - metrics.track(metrics.STREAM, self.account, {"name": "get"}) - query = gql( - """ - query User($stream_limit: Int!) { - user { - id - bio - name - email - avatar - company - verified - profiles - role - streams(limit: $stream_limit) { - totalCount - cursor - items { - id - name - role - isPublic - createdAt - updatedAt - description - commentCount - favoritesCount - collaborators { - id - name - role - } - } - } - } - } - """ - ) - - params = {"stream_limit": stream_limit} - - return self.make_request( - query=query, params=params, return_type=["user", "streams", "items"] - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream List"}) + return super().list(stream_limit) def create( self, @@ -159,25 +68,8 @@ def create( Returns: id {str} -- the id of the newly created stream """ - metrics.track(metrics.STREAM, self.account, {"name": "create"}) - query = gql( - """ - mutation StreamCreate($stream: StreamCreateInput!) { - streamCreate(stream: $stream) - } - """ - ) - if len(name) < 3 and len(name) != 0: - return SpeckleException( - message="Stream Name must be at least 3 characters" - ) - params = { - "stream": {"name": name, "description": description, "isPublic": is_public} - } - - return self.make_request( - query=query, params=params, return_type="streamCreate", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Create"}) + return super().create(name, description, is_public) def update( self, @@ -198,27 +90,8 @@ def update( Returns: bool -- whether the stream update was successful """ - metrics.track(metrics.STREAM, self.account, {"name": "update"}) - query = gql( - """ - mutation StreamUpdate($stream: StreamUpdateInput!) { - streamUpdate(stream: $stream) - } - """ - ) - - params = { - "id": id, - "name": name, - "description": description, - "isPublic": is_public, - } - # remove None values so graphql doesn't cry - params = {"stream": {k: v for k, v in params.items() if v is not None}} - - return self.make_request( - query=query, params=params, return_type="streamUpdate", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Update"}) + return super().update(id, name, description, is_public) def delete(self, id: str) -> bool: """Delete a stream given its id @@ -229,20 +102,8 @@ def delete(self, id: str) -> bool: Returns: bool -- whether the deletion was successful """ - metrics.track(metrics.STREAM, self.account, {"name": "delete"}) - query = gql( - """ - mutation StreamDelete($id: String!) 
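Because the stream wrappers above keep their public signatures and only forward to the core layer, calling code is meant to read exactly as before. A short usage sketch against the public client; the host, account setup, and stream values are placeholders:

```py
from specklepy.api.client import SpeckleClient
from specklepy.api.credentials import get_default_account

client = SpeckleClient(host="speckle.xyz")
client.authenticate_with_account(get_default_account())

# each call emits a metrics.SDK event, then delegates to the core stream resource
stream_id = client.stream.create(name="demo stream", description="scratch space")
stream = client.stream.get(id=stream_id)
client.stream.update(id=stream_id, description="updated description")
client.stream.delete(id=stream_id)
```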
{ - streamDelete(id: $id) - } - """ - ) - - params = {"id": id} - - return self.make_request( - query=query, params=params, return_type="streamDelete", parse_response=False - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Delete"}) + return super().delete(id) def search( self, @@ -262,67 +123,8 @@ def search( Returns: List[Stream] -- a list of Streams that match the search query """ - metrics.track(metrics.STREAM, self.account, {"name": "search"}) - query = gql( - """ - query StreamSearch( - $search_query: String!, - $limit: Int!, - $branch_limit:Int!, - $commit_limit:Int! - ) { - streams(query: $search_query, limit: $limit) { - items { - id - name - role - description - isPublic - createdAt - updatedAt - collaborators { - id - name - role - avatar - } - branches(limit: $branch_limit) { - totalCount - cursor - items { - id - name - description - commits(limit: $commit_limit) { - totalCount - cursor - items { - id - referencedObject - message - authorName - authorId - createdAt - } - } - } - } - } - } - } - """ - ) - - params = { - "search_query": search_query, - "limit": limit, - "branch_limit": branch_limit, - "commit_limit": commit_limit, - } - - return self.make_request( - query=query, params=params, return_type=["streams", "items"] - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Search"}) + return super().search(search_query, limit, branch_limit, commit_limit) def favorite(self, stream_id: str, favorited: bool = True): """Favorite or unfavorite the given stream. @@ -335,86 +137,8 @@ def favorite(self, stream_id: str, favorited: bool = True): Returns: Stream -- the stream with its `id`, `name`, and `favoritedDate` """ - metrics.track(metrics.STREAM, self.account, {"name": "favorite"}) - query = gql( - """ - mutation StreamFavorite($stream_id: String!, $favorited: Boolean!) { - streamFavorite(streamId: $stream_id, favorited: $favorited) { - id - name - favoritedDate - favoritesCount - } - } - """ - ) - - params = { - "stream_id": stream_id, - "favorited": favorited, - } - - return self.make_request( - query=query, params=params, return_type=["streamFavorite"] - ) - - @deprecated( - version="2.6.4", - reason=( - "As of Speckle Server v2.6.4, this method is deprecated. Users need to be" - " invited and accept the invite before being added to a stream" - ), - ) - def grant_permission(self, stream_id: str, user_id: str, role: str): - """Grant permissions to a user on a given stream - - Valid for Speckle Server version < 2.6.4 - - Arguments: - stream_id {str} -- the id of the stream to grant permissions to - user_id {str} -- the id of the user to grant permissions for - role {str} -- the role to grant the user - - Returns: - bool -- True if the operation was successful - """ - metrics.track(metrics.PERMISSION, self.account, {"name": "add", "role": role}) - # we're checking for the actual version info, and if the version is 'dev' we treat it - # as an up to date instance - if self.server_version and ( - self.server_version == ("dev",) or self.server_version >= (2, 6, 4) - ): - raise UnsupportedException( - "Server mutation `grant_permission` is no longer supported as of" - " Speckle Server v2.6.4. Please use the new `update_permission` method" - " to change an existing user's permission or use the `invite` method to" - " invite a user to a stream." - ) - - query = gql( - """ - mutation StreamGrantPermission( - $permission_params: StreamGrantPermissionInput ! 
- ) { - streamGrantPermission(permissionParams: $permission_params) - } - """ - ) - - params = { - "permission_params": { - "streamId": stream_id, - "userId": user_id, - "role": role, - } - } - - return self.make_request( - query=query, - params=params, - return_type="streamGrantPermission", - parse_response=False, - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Favorite"}) + return super().favorite(stream_id, favorited) def get_all_pending_invites( self, stream_id: str @@ -431,46 +155,8 @@ def get_all_pending_invites( List[PendingStreamCollaborator] -- a list of pending invites for the specified stream """ - metrics.track(metrics.INVITE, self.account, {"name": "get"}) - self._check_invites_supported() - - query = gql( - """ - query StreamInvites($streamId: String!) { - stream(id: $streamId){ - pendingCollaborators { - id - token - inviteId - streamId - streamName - title - role - invitedBy{ - id - name - company - avatar - } - user { - id - name - company - avatar - } - } - } - } - """ - ) - params = {"streamId": stream_id} - - return self.make_request( - query=query, - params=params, - return_type=["stream", "pendingCollaborators"], - schema=PendingStreamCollaborator, - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Invite Get"}) + return super().get_all_pending_invites(stream_id) def invite( self, @@ -496,38 +182,8 @@ def invite( Returns: bool -- True if the operation was successful """ - metrics.track(metrics.INVITE, self.account, {"name": "create"}) - self._check_invites_supported() - - if email is None and user_id is None: - raise SpeckleException( - "You must provide either an email or a user id to use the" - " `stream.invite` method" - ) - - query = gql( - """ - mutation StreamInviteCreate($input: StreamInviteCreateInput!) { - streamInviteCreate(input: $input) - } - """ - ) - - params = { - "email": email, - "userId": user_id, - "streamId": stream_id, - "message": message, - "role": role, - } - params = {"input": {k: v for k, v in params.items() if v is not None}} - - return self.make_request( - query=query, - params=params, - return_type="streamInviteCreate", - parse_response=False, - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Invite Create"}) + return super().invite(stream_id, email, user_id, role, message) def invite_batch( self, @@ -552,42 +208,8 @@ def invite_batch( Returns: bool -- True if the operation was successful """ - metrics.track(metrics.INVITE, self.account, {"name": "batch create"}) - self._check_invites_supported() - if emails is None and user_ids is None: - raise SpeckleException( - "You must provide either an email or a user id to use the" - " `stream.invite` method" - ) - - query = gql( - """ - mutation StreamInviteBatchCreate($input: [StreamInviteCreateInput!]!) 
{ - streamInviteBatchCreate(input: $input) - } - """ - ) - - email_invites = [ - {"streamId": stream_id, "message": message, "email": email} - for email in emails - if emails is not None - ] - - user_invites = [ - {"streamId": stream_id, "message": message, "userId": user_id} - for user_id in user_ids - if user_ids is not None - ] - - params = {"input": [*email_invites, *user_invites]} - - return self.make_request( - query=query, - params=params, - return_type="streamInviteBatchCreate", - parse_response=False, - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Invite Batch Create"}) + return super().invite_batch(stream_id, emails, user_ids, message) def invite_cancel(self, stream_id: str, invite_id: str) -> bool: """Cancel an existing stream invite @@ -601,25 +223,8 @@ def invite_cancel(self, stream_id: str, invite_id: str) -> bool: Returns: bool -- true if the operation was successful """ - metrics.track(metrics.INVITE, self.account, {"name": "cancel"}) - self._check_invites_supported() - - query = gql( - """ - mutation StreamInviteCancel($streamId: String!, $inviteId: String!) { - streamInviteCancel(streamId: $streamId, inviteId: $inviteId) - } - """ - ) - - params = {"streamId": stream_id, "inviteId": invite_id} - - return self.make_request( - query=query, - params=params, - return_type="streamInviteCancel", - parse_response=False, - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Invite Cancel"}) + return super().invite_cancel(stream_id, invite_id) def invite_use(self, stream_id: str, token: str, accept: bool = True) -> bool: """Accept or decline a stream invite @@ -635,29 +240,8 @@ def invite_use(self, stream_id: str, token: str, accept: bool = True) -> bool: Returns: bool -- true if the operation was successful """ - metrics.track(metrics.INVITE, self.account, {"name": "use"}) - self._check_invites_supported() - - query = gql( - """ - mutation StreamInviteUse( - $accept: Boolean!, - $streamId: String!, - $token: String! - ) { - streamInviteUse(accept: $accept, streamId: $streamId, token: $token) - } - """ - ) - - params = {"streamId": stream_id, "token": token, "accept": accept} - - return self.make_request( - query=query, - params=params, - return_type="streamInviteUse", - parse_response=False, - ) + metrics.track(metrics.SDK, self.account, {"name": "Invite Use"}) + return super().invite_use(stream_id, token, accept) def update_permission(self, stream_id: str, user_id: str, role: str): """Updates permissions for a user on a given stream @@ -672,41 +256,8 @@ def update_permission(self, stream_id: str, user_id: str, role: str): Returns: bool -- True if the operation was successful """ - metrics.track( - metrics.PERMISSION, self.account, {"name": "update", "role": role} - ) - if self.server_version and ( - self.server_version != ("dev",) and self.server_version < (2, 6, 4) - ): - raise UnsupportedException( - "Server mutation `update_permission` is only supported as of Speckle" - " Server v2.6.4. Please update your Speckle Server to use this method" - " or use the `grant_permission` method instead." - ) - query = gql( - """ - mutation StreamUpdatePermission( - $permission_params: StreamUpdatePermissionInput! 
- ) { - streamUpdatePermission(permissionParams: $permission_params) - } - """ - ) - - params = { - "permission_params": { - "streamId": stream_id, - "userId": user_id, - "role": role, - } - } - - return self.make_request( - query=query, - params=params, - return_type="streamUpdatePermission", - parse_response=False, - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Permission Update", "role": role}) + return super().update_permission(stream_id, user_id, role) def revoke_permission(self, stream_id: str, user_id: str): """Revoke permissions from a user on a given stream @@ -718,25 +269,8 @@ def revoke_permission(self, stream_id: str, user_id: str): Returns: bool -- True if the operation was successful """ - metrics.track(metrics.PERMISSION, self.account, {"name": "revoke"}) - query = gql( - """ - mutation StreamRevokePermission( - $permission_params: StreamRevokePermissionInput! - ) { - streamRevokePermission(permissionParams: $permission_params) - } - """ - ) - - params = {"permission_params": {"streamId": stream_id, "userId": user_id}} - - return self.make_request( - query=query, - params=params, - return_type="streamRevokePermission", - parse_response=False, - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Permission Revoke"}) + return super().revoke_permission(stream_id, user_id) def activity( self, @@ -765,64 +299,6 @@ def activity( -- oldest cutoff for activity (ie: return all activity _after_ this time) cursor {datetime} -- timestamp cursor for pagination """ - query = gql( - """ - query StreamActivity( - $stream_id: String!, - $action_type: String, - $before:DateTime, - $after: DateTime, - $cursor: DateTime, - $limit: Int - ){ - stream(id: $stream_id) { - activity( - actionType: $action_type, - before: $before, - after: $after, - cursor: $cursor, - limit: $limit - ) { - totalCount - cursor - items { - actionType - info - userId - streamId - resourceId - resourceType - message - time - } - } - } - } - """ - ) - try: - params = { - "stream_id": stream_id, - "limit": limit, - "action_type": action_type, - "before": before.astimezone(timezone.utc).isoformat() - if before - else before, - "after": after.astimezone(timezone.utc).isoformat() if after else after, - "cursor": cursor.astimezone(timezone.utc).isoformat() - if cursor - else cursor, - } - except AttributeError as e: - raise SpeckleException( - "Could not get stream activity - `before`, `after`, and `cursor` must" - " be in `datetime` format if provided", - ValueError(), - ) from e - - return self.make_request( - query=query, - params=params, - return_type=["stream", "activity"], - schema=ActivityCollection, - ) + metrics.track(metrics.SDK, self.account, {"name": "Stream Activity"}) + return super().activity(stream_id, action_type, limit, before, after, cursor) + \ No newline at end of file diff --git a/src/specklepy/api/resources/subscriptions.py b/src/specklepy/api/resources/subscriptions.py index 9651a0fd..d8cbdc90 100644 --- a/src/specklepy/api/resources/subscriptions.py +++ b/src/specklepy/api/resources/subscriptions.py @@ -8,8 +8,8 @@ from specklepy.api.resources.stream import Stream from specklepy.logging.exceptions import SpeckleException -NAME = "subscribe" - +from specklepy.logging import metrics +from specklepy.core.api.resources.subscriptions import Resource as CoreResource def check_wsclient(function): @wraps(function) @@ -24,7 +24,7 @@ async def check_wsclient_wrapper(self, *args, **kwargs): return check_wsclient_wrapper -class Resource(ResourceBase): +class Resource(CoreResource): 
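The stream `activity` wrapper above still takes plain `datetime` objects for `before`, `after`, and `cursor`; serialising them to UTC ISO strings now happens in the core resource, as in the query code being removed here. A sketch of a filtered activity call, with the stream id as a placeholder:

```py
from datetime import datetime, timedelta, timezone

from specklepy.api.client import SpeckleClient
from specklepy.api.credentials import get_default_account

client = SpeckleClient(host="speckle.xyz")
client.authenticate_with_account(get_default_account())

# activity from the last seven days, returned as an ActivityCollection
last_week = datetime.now(timezone.utc) - timedelta(days=7)
activity = client.stream.activity("your-stream-id", after=last_week, limit=50)
print(activity.totalCount)
```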
"""API Access class for subscriptions""" def __init__(self, account, basepath, client) -> None: @@ -32,7 +32,6 @@ def __init__(self, account, basepath, client) -> None: account=account, basepath=basepath, client=client, - name=NAME, ) @check_wsclient @@ -47,14 +46,8 @@ async def stream_added(self, callback: Optional[Callable] = None): Returns: Stream -- the update stream """ - query = gql( - """ - subscription { userStreamAdded } - """ - ) - return await self.subscribe( - query=query, callback=callback, return_type="userStreamAdded", schema=Stream - ) + metrics.track(metrics.SDK, self.account, {"name": "Subscription Stream Added"}) + return super().stream_added(callback) @check_wsclient async def stream_updated(self, id: str, callback: Optional[Callable] = None): @@ -71,20 +64,8 @@ async def stream_updated(self, id: str, callback: Optional[Callable] = None): Returns: Stream -- the update stream """ - query = gql( - """ - subscription Update($id: String!) { streamUpdated(streamId: $id) } - """ - ) - params = {"id": id} - - return await self.subscribe( - query=query, - params=params, - callback=callback, - return_type="streamUpdated", - schema=Stream, - ) + metrics.track(metrics.SDK, self.account, {"name": "Subscription Stream Updated"}) + return super().stream_updated(id, callback) @check_wsclient async def stream_removed(self, callback: Optional[Callable] = None): @@ -102,18 +83,8 @@ async def stream_removed(self, callback: Optional[Callable] = None): Returns: dict -- dict containing 'id' of stream removed and optionally 'revokedBy' """ - query = gql( - """ - subscription { userStreamRemoved } - """ - ) - - return await self.subscribe( - query=query, - callback=callback, - return_type="userStreamRemoved", - parse_response=False, - ) + metrics.track(metrics.SDK, self.account, {"name": "Subscription Stream Removed"}) + return super().stream_removed(callback) @check_wsclient async def subscribe( diff --git a/src/specklepy/api/resources/user.py b/src/specklepy/api/resources/user.py index e46b1ac2..2aa2bf69 100644 --- a/src/specklepy/api/resources/user.py +++ b/src/specklepy/api/resources/user.py @@ -6,10 +6,11 @@ from specklepy.api.models import ActivityCollection, PendingStreamCollaborator, User from specklepy.api.resource import ResourceBase -from specklepy.logging import metrics + from specklepy.logging.exceptions import SpeckleException -NAME = "user" +from specklepy.logging import metrics +from specklepy.core.api.resources.user import Resource as CoreResource DEPRECATION_VERSION = "2.9.0" DEPRECATION_TEXT = ( @@ -18,7 +19,7 @@ ) -class Resource(ResourceBase): +class Resource(CoreResource): """API Access class for users""" def __init__(self, account, basepath, client, server_version) -> None: @@ -26,7 +27,6 @@ def __init__(self, account, basepath, client, server_version) -> None: account=account, basepath=basepath, client=client, - name=NAME, server_version=server_version, ) self.schema = User @@ -44,29 +44,9 @@ def get(self, id: Optional[str] = None) -> User: Returns: User -- the retrieved user """ - metrics.track(metrics.USER, self.account, {"name": "get"}) - query = gql( - """ - query User($id: String) { - user(id: $id) { - id - email - name - bio - company - avatar - verified - profiles - role - } - } - """ - ) - - params = {"id": id} - - return self.make_request(query=query, params=params, return_type="user") - + metrics.track(metrics.SDK, self.account, {"name": "User Get_deprecated"}) + return super().get(id) + @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) def 
search( self, search_query: str, limit: int = 25 @@ -81,33 +61,8 @@ def search( Returns: List[User] -- a list of User objects that match the search query """ - if len(search_query) < 3: - return SpeckleException( - message="User search query must be at least 3 characters" - ) - - metrics.track(metrics.USER, self.account, {"name": "search"}) - query = gql( - """ - query UserSearch($search_query: String!, $limit: Int!) { - userSearch(query: $search_query, limit: $limit) { - items { - id - name - bio - company - avatar - verified - } - } - } - """ - ) - params = {"search_query": search_query, "limit": limit} - - return self.make_request( - query=query, params=params, return_type=["userSearch", "items"] - ) + metrics.track(metrics.SDK, self.account, {"name": "User Search_deprecated"}) + return super().search(search_query, limit) @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) def update( @@ -128,28 +83,9 @@ def update( Returns: bool -- True if your profile was updated successfully """ - metrics.track(metrics.USER, self.account, {"name": "update"}) - query = gql( - """ - mutation UserUpdate($user: UserUpdateInput!) { - userUpdate(user: $user) - } - """ - ) - params = {"name": name, "company": company, "bio": bio, "avatar": avatar} - - params = {"user": {k: v for k, v in params.items() if v is not None}} - - if not params["user"]: - return SpeckleException( - message=( - "You must provide at least one field to update your user profile" - ) - ) - - return self.make_request( - query=query, params=params, return_type="userUpdate", parse_response=False - ) + #metrics.track(metrics.USER, self.account, {"name": "update"}) + metrics.track(metrics.SDK, self.account, {"name": "User Update_deprecated"}) + return super().update(name, company, bio, avatar) @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) def activity( @@ -180,58 +116,9 @@ def activity( -- oldest cutoff for activity (ie: return all activity _after_ this time) cursor {datetime} -- timestamp cursor for pagination """ - - query = gql( - """ - query UserActivity( - $user_id: String, - $action_type: String, - $before:DateTime, - $after: DateTime, - $cursor: DateTime, - $limit: Int - ){ - user(id: $user_id) { - activity( - actionType: $action_type, - before: $before, - after: $after, - cursor: $cursor, - limit: $limit - ) { - totalCount - cursor - items { - actionType - info - userId - streamId - resourceId - resourceType - message - time - } - } - } - } - """ - ) - - params = { - "user_id": user_id, - "limit": limit, - "action_type": action_type, - "before": before.astimezone(timezone.utc).isoformat() if before else before, - "after": after.astimezone(timezone.utc).isoformat() if after else after, - "cursor": cursor.astimezone(timezone.utc).isoformat() if cursor else cursor, - } - - return self.make_request( - query=query, - params=params, - return_type=["user", "activity"], - schema=ActivityCollection, - ) + metrics.track(metrics.SDK, self.account, {"name": "User Activity_deprecated"}) + return super().activity(user_id, limit, action_type, before, after, cursor) + @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) def get_all_pending_invites(self) -> List[PendingStreamCollaborator]: @@ -243,36 +130,10 @@ def get_all_pending_invites(self) -> List[PendingStreamCollaborator]: List[PendingStreamCollaborator] -- a list of pending invites for the current user """ - metrics.track(metrics.INVITE, self.account, {"name": "get"}) - self._check_invites_supported() - - query = gql( - """ - query StreamInvites 
{ - streamInvites{ - id - token - inviteId - streamId - streamName - title - role - invitedBy { - id - name - company - avatar - } - } - } - """ - ) - - return self.make_request( - query=query, - return_type="streamInvites", - schema=PendingStreamCollaborator, - ) + #metrics.track(metrics.INVITE, self.account, {"name": "get"}) + metrics.track(metrics.SDK, self.account, {"name": "User GetAllInvites_deprecated"}) + return super().get_all_pending_invites() + @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) def get_pending_invite( @@ -291,37 +152,7 @@ def get_pending_invite( PendingStreamCollaborator -- the invite for the given stream (or None if it isn't found) """ - metrics.track(metrics.INVITE, self.account, {"name": "get"}) - self._check_invites_supported() - - query = gql( - """ - query StreamInvite($streamId: String!, $token: String) { - streamInvite(streamId: $streamId, token: $token) { - id - token - streamId - streamName - title - role - invitedBy { - id - name - company - avatar - } - } - } - """ - ) - - params = {"streamId": stream_id} - if token: - params["token"] = token - - return self.make_request( - query=query, - params=params, - return_type="streamInvite", - schema=PendingStreamCollaborator, - ) + #metrics.track(metrics.INVITE, self.account, {"name": "get"}) + metrics.track(metrics.SDK, self.account, {"name": "User GetInvite_deprecated"}) + return super().get_pending_invite(stream_id, token) + \ No newline at end of file diff --git a/src/specklepy/api/wrapper.py b/src/specklepy/api/wrapper.py index b2074c6c..eb480d19 100644 --- a/src/specklepy/api/wrapper.py +++ b/src/specklepy/api/wrapper.py @@ -7,12 +7,13 @@ get_account_from_token, get_local_accounts, ) -from specklepy.logging import metrics from specklepy.logging.exceptions import SpeckleException, SpeckleWarning from specklepy.transports.server.server import ServerTransport +from specklepy.logging import metrics +from specklepy.core.api.wrapper import StreamWrapper as CoreStreamWrapper -class StreamWrapper: +class StreamWrapper(CoreStreamWrapper): """ The `StreamWrapper` gives you some handy helpers to deal with urls and get authenticated clients and transports. @@ -49,93 +50,16 @@ class StreamWrapper: _client: SpeckleClient = None _account: Account = None - def __repr__(self): - return ( - f"StreamWrapper( server: {self.host}, stream_id: {self.stream_id}, type:" - f" {self.type} )" - ) - - def __str__(self) -> str: - return self.__repr__() - - @property - def type(self) -> str: - if self.object_id: - return "object" - elif self.commit_id: - return "commit" - elif self.branch_name: - return "branch" - else: - return "stream" if self.stream_id else "invalid" - def __init__(self, url: str) -> None: - self.stream_url = url - parsed = urlparse(url) - self.host = parsed.netloc - self.use_ssl = parsed.scheme == "https" - segments = parsed.path.strip("/").split("/", 3) - metrics.track(metrics.STREAM_WRAPPER, self.get_account()) - - if not segments or len(segments) < 2: - raise SpeckleException( - f"Cannot parse {url} into a stream wrapper class - invalid URL" - " provided." 
- ) - - while segments: - segment = segments.pop(0) - if segments and segment.lower() == "streams": - self.stream_id = segments.pop(0) - elif segments and segment.lower() == "commits": - self.commit_id = segments.pop(0) - elif segments and segment.lower() == "branches": - self.branch_name = unquote(segments.pop(0)) - elif segments and segment.lower() == "objects": - self.object_id = segments.pop(0) - elif segment.lower() == "globals": - self.branch_name = "globals" - if segments: - self.commit_id = segments.pop(0) - else: - raise SpeckleException( - f"Cannot parse {url} into a stream wrapper class - invalid URL" - " provided." - ) - - if not self.stream_id: - raise SpeckleException( - f"Cannot parse {url} into a stream wrapper class - no stream id found." - ) - - @property - def server_url(self): - return f"{'https' if self.use_ssl else 'http'}://{self.host}" + super().__init__(url = url) def get_account(self, token: str = None) -> Account: """ Gets an account object for this server from the local accounts db (added via Speckle Manager or a json file) """ - if self._account and self._account.token: - return self._account - - self._account = next( - ( - a - for a in get_local_accounts() - if self.host == urlparse(a.serverInfo.url).netloc - ), - None, - ) - - if not self._account: - self._account = get_account_from_token(token, self.server_url) - - if self._client: - self._client.authenticate_with_account(self._account) - - return self._account + metrics.track(metrics.SDK, custom_props={"name": "Stream Wrapper Get Account"}) + return super().get_account(token) def get_client(self, token: str = None) -> SpeckleClient: """ @@ -152,25 +76,8 @@ def get_client(self, token: str = None) -> SpeckleClient: SpeckleClient -- authenticated with a corresponding local account or the provided token """ - if self._client and token is None: - return self._client - - if not self._account or not self._account.token: - self.get_account(token) - - if not self._client: - self._client = SpeckleClient(host=self.host, use_ssl=self.use_ssl) - - if self._account.token is None and token is None: - warn(f"No local account found for server {self.host}", SpeckleWarning) - return self._client - - if self._account.token: - self._client.authenticate_with_account(self._account) - else: - self._client.authenticate_with_token(token) - - return self._client + metrics.track(metrics.SDK, custom_props={"name": "Stream Wrapper Get Client"}) + return super().get_client(token) def get_transport(self, token: str = None) -> ServerTransport: """ @@ -183,6 +90,5 @@ def get_transport(self, token: str = None) -> ServerTransport: ServerTransport -- constructed for this stream with a pre-authenticated client """ - if not self._account or not self._account.token: - self.get_account(token) - return ServerTransport(self.stream_id, account=self._account) + metrics.track(metrics.SDK, custom_props={"name": "Stream Wrapper Get Transport"}) + return super().get_transport(token) diff --git a/src/specklepy/core/api/__init__.py b/src/specklepy/core/api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/specklepy/core/api/client.py b/src/specklepy/core/api/client.py new file mode 100644 index 00000000..197fcc96 --- /dev/null +++ b/src/specklepy/core/api/client.py @@ -0,0 +1,242 @@ +import re +from typing import Dict +from warnings import warn + +from deprecated import deprecated +from gql import Client +from gql.transport.exceptions import TransportServerError +from gql.transport.requests import RequestsHTTPTransport +from 
gql.transport.websockets import WebsocketsTransport + +from specklepy.core.api import resources +from specklepy.core.api.credentials import Account, get_account_from_token +from specklepy.core.api.resources import ( + user, + active_user, + branch, + commit, + object, + other_user, + server, + stream, + subscriptions, +) +from specklepy.logging import metrics +from specklepy.logging.exceptions import SpeckleException, SpeckleWarning + + +class SpeckleClient: + """ + The `SpeckleClient` is your entry point for interacting with + your Speckle Server's GraphQL API. + You'll need to have access to a server to use it, + or you can use our public server `speckle.xyz`. + + To authenticate the client, you'll need to have downloaded + the [Speckle Manager](https://speckle.guide/#speckle-manager) + and added your account. + + ```py + from specklepy.api.client import SpeckleClient + from specklepy.api.credentials import get_default_account + + # initialise the client + client = SpeckleClient(host="speckle.xyz") # or whatever your host is + # client = SpeckleClient(host="localhost:3000", use_ssl=False) or use local server + + # authenticate the client with an account (account has been added in Speckle Manager) + account = get_default_account() + client.authenticate_with_account(account) + + # create a new stream. this returns the stream id + new_stream_id = client.stream.create(name="a shiny new stream") + + # use that stream id to get the stream from the server + new_stream = client.stream.get(id=new_stream_id) + ``` + """ + + DEFAULT_HOST = "speckle.xyz" + USE_SSL = True + + def __init__(self, host: str = DEFAULT_HOST, use_ssl: bool = USE_SSL) -> None: + ws_protocol = "ws" + http_protocol = "http" + + if use_ssl: + ws_protocol = "wss" + http_protocol = "https" + + # sanitise host input by removing protocol and trailing slash + host = re.sub(r"((^\w+:|^)\/\/)|(\/$)", "", host) + + self.url = f"{http_protocol}://{host}" + self.graphql = f"{self.url}/graphql" + self.ws_url = f"{ws_protocol}://{host}/graphql" + self.account = Account() + + self.httpclient = Client( + transport=RequestsHTTPTransport(url=self.graphql, verify=True, retries=3) + ) + self.wsclient = None + + self._init_resources() + + # ? Check compatibility with the server - i think we can skip this at this point? save a request + # try: + # server_info = self.server.get() + # if isinstance(server_info, Exception): + # raise server_info + # if not isinstance(server_info, ServerInfo): + # raise Exception("Couldn't get ServerInfo") + # except Exception as ex: + # raise SpeckleException( + # f"{self.url} is not a compatible Speckle Server", ex + # ) from ex + + def __repr__(self): + return ( + f"SpeckleClient( server: {self.url}, authenticated:" + f" {self.account.token is not None} )" + ) + + @deprecated( + version="2.6.0", + reason=( + "Renamed: please use `authenticate_with_account` or" + " `authenticate_with_token` instead." + ), + ) + def authenticate(self, token: str) -> None: + """Authenticate the client using a personal access token + The token is saved in the client object and a synchronous GraphQL + entrypoint is created + + Arguments: + token {str} -- an api token + """ + self.authenticate_with_token(token) + self._set_up_client() + + def authenticate_with_token(self, token: str) -> None: + """ + Authenticate the client using a personal access token. 
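Alongside the account-based flow shown in the class docstring, the client can be authenticated straight from a personal access token; `get_account_from_token` resolves it to a local account when one matches, or wraps it in a shell account otherwise. A minimal sketch, with the token value as a placeholder:

```py
from specklepy.api.client import SpeckleClient

client = SpeckleClient(host="speckle.xyz")
# resolves the token against local accounts, then rebuilds the GraphQL transports
client.authenticate_with_token("your-personal-access-token")

print(client)  # SpeckleClient( server: https://speckle.xyz, authenticated: True )
```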
+ The token is saved in the client object and a synchronous GraphQL + entrypoint is created + + Arguments: + token {str} -- an api token + """ + self.account = get_account_from_token(token, self.url) + self._set_up_client() + + def authenticate_with_account(self, account: Account) -> None: + """Authenticate the client using an Account object + The account is saved in the client object and a synchronous GraphQL + entrypoint is created + + Arguments: + account {Account} -- the account object which can be found with + `get_default_account` or `get_local_accounts` + """ + self.account = account + self._set_up_client() + + def _set_up_client(self) -> None: + headers = { + "Authorization": f"Bearer {self.account.token}", + "Content-Type": "application/json", + "apollographql-client-name": metrics.HOST_APP, + "apollographql-client-version": metrics.HOST_APP_VERSION, + } + httptransport = RequestsHTTPTransport( + url=self.graphql, headers=headers, verify=True, retries=3 + ) + wstransport = WebsocketsTransport( + url=self.ws_url, + init_payload={"Authorization": f"Bearer {self.account.token}"}, + ) + self.httpclient = Client(transport=httptransport) + self.wsclient = Client(transport=wstransport) + + self._init_resources() + + try: + user_or_error = self.active_user.get() + if isinstance(user_or_error, SpeckleException): + if isinstance(user_or_error.exception, TransportServerError): + raise user_or_error.exception + else: + raise user_or_error + except TransportServerError as ex: + if ex.code == 403: + warn( + SpeckleWarning( + "Possibly invalid token - could not authenticate Speckle Client" + f" for server {self.url}" + ) + ) + else: + raise ex + + def execute_query(self, query: str) -> Dict: + return self.httpclient.execute(query) + + def _init_resources(self) -> None: + self.server = server.Resource( + account=self.account, basepath=self.url, client=self.httpclient + ) + server_version = None + try: + server_version = self.server.version() + except Exception: + pass + self.user = user.Resource( + account=self.account, + basepath=self.url, + client=self.httpclient, + server_version=server_version, + ) + self.other_user = other_user.Resource( + account=self.account, + basepath=self.url, + client=self.httpclient, + server_version=server_version, + ) + self.active_user = active_user.Resource( + account=self.account, + basepath=self.url, + client=self.httpclient, + server_version=server_version, + ) + self.stream = stream.Resource( + account=self.account, + basepath=self.url, + client=self.httpclient, + server_version=server_version, + ) + self.commit = commit.Resource( + account=self.account, basepath=self.url, client=self.httpclient + ) + self.branch = branch.Resource( + account=self.account, basepath=self.url, client=self.httpclient + ) + self.object = object.Resource( + account=self.account, basepath=self.url, client=self.httpclient + ) + self.subscribe = subscriptions.Resource( + account=self.account, + basepath=self.ws_url, + client=self.wsclient, + ) + + def __getattr__(self, name): + try: + attr = getattr(resources, name) + return attr.Resource( + account=self.account, basepath=self.url, client=self.httpclient + ) + except AttributeError: + raise SpeckleException( + f"Method {name} is not supported by the SpeckleClient class" + ) diff --git a/src/specklepy/core/api/credentials.py b/src/specklepy/core/api/credentials.py new file mode 100644 index 00000000..f83df899 --- /dev/null +++ b/src/specklepy/core/api/credentials.py @@ -0,0 +1,154 @@ +import os +from pathlib import Path +from typing 
import List, Optional + +from pydantic import BaseModel, Field # pylint: disable=no-name-in-module + +from specklepy.core.api.models import ServerInfo +from specklepy.core.helpers import speckle_path_provider +from specklepy.logging import metrics +from specklepy.logging.exceptions import SpeckleException +from specklepy.transports.sqlite import SQLiteTransport + + +class UserInfo(BaseModel): + name: Optional[str] = None + email: Optional[str] = None + company: Optional[str] = None + id: Optional[str] = None + + +class Account(BaseModel): + isDefault: bool = False + token: Optional[str] = None + refreshToken: Optional[str] = None + serverInfo: ServerInfo = Field(default_factory=ServerInfo) + userInfo: UserInfo = Field(default_factory=UserInfo) + id: Optional[str] = None + + def __repr__(self) -> str: + return ( + f"Account(email: {self.userInfo.email}, server: {self.serverInfo.url}," + f" isDefault: {self.isDefault})" + ) + + def __str__(self) -> str: + return self.__repr__() + + @classmethod + def from_token(cls, token: str, server_url: str = None): + acct = cls(token=token) + acct.serverInfo.url = server_url + return acct + + +def get_local_accounts(base_path: Optional[str] = None) -> List[Account]: + """Gets all the accounts present in this environment + + Arguments: + base_path {str} -- custom base path if you are not using the system default + + Returns: + List[Account] -- list of all local accounts or an empty list if + no accounts were found + """ + accounts: List[Account] = [] + try: + account_storage = SQLiteTransport(scope="Accounts", base_path=base_path) + res = account_storage.get_all_objects() + account_storage.close() + if res: + accounts.extend(Account.model_validate_json(r[1]) for r in res) + except SpeckleException: + # cannot open SQLiteTransport, probably because of the lack + # of disk write permissions + pass + + json_acct_files = [] + json_path = str(speckle_path_provider.accounts_folder_path()) + try: + os.makedirs(json_path, exist_ok=True) + json_acct_files.extend( + file for file in os.listdir(json_path) if file.endswith(".json") + ) + + except Exception: + # cannot find or get the json account paths + pass + + if json_acct_files: + try: + accounts.extend( + Account.model_validate_json(Path(json_path, json_file).read_text()) + # Account.parse_file(os.path.join(json_path, json_file)) + for json_file in json_acct_files + ) + except Exception as ex: + raise SpeckleException( + "Invalid json accounts could not be read. Please fix or remove them.", + ex, + ) from ex + + return accounts + + +def get_default_account(base_path: Optional[str] = None) -> Optional[Account]: + """ + Gets this environment's default account if any. If there is no default, + the first found will be returned and set as default. 
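The account helpers in this new module mirror the existing `specklepy.api.credentials` behaviour: accounts are read from the SQLite store and from any JSON files in the accounts folder, the default account falls back to the first one found, and a token can be resolved to a matching account. A usage sketch; the token and server URL are placeholders:

```py
from specklepy.core.api.credentials import (
    get_account_from_token,
    get_default_account,
    get_local_accounts,
)

accounts = get_local_accounts()   # SQLite account store plus any *.json account files
default = get_default_account()   # None when no local accounts are configured
shell = get_account_from_token("some-token", "https://speckle.xyz")

print([acct.userInfo.email for acct in accounts])
print(default.serverInfo.url if default else "no local account")
print(shell.token)                # at minimum a shell Account wrapping the token
```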
+ Arguments: + base_path {str} -- custom base path if you are not using the system default + + Returns: + Account -- the default account or None if no local accounts were found + """ + accounts = get_local_accounts(base_path=base_path) + if not accounts: + return None + + default = next((acc for acc in accounts if acc.isDefault), None) + if not default: + default = accounts[0] + default.isDefault = True + #metrics.initialise_tracker(default) + + return default + + +def get_account_from_token(token: str, server_url: str = None) -> Account: + """Gets the local account for the token if it exists + Arguments: + token {str} -- the api token + + Returns: + Account -- the local account with this token or a shell account containing + just the token and url if no local account is found + """ + accounts = get_local_accounts() + if not accounts: + return Account.from_token(token, server_url) + + acct = next((acc for acc in accounts if acc.token == token), None) + if acct: + return acct + + if server_url: + url = server_url.lower() + acct = next( + (acc for acc in accounts if url in acc.serverInfo.url.lower()), None + ) + if acct: + return acct + + return Account.from_token(token, server_url) + + +class StreamWrapper: + def __init__(self, url: str = None) -> None: + raise SpeckleException( + message=( + "The StreamWrapper has moved as of v2.6.0! Please import from" + " specklepy.api.wrapper" + ), + exception=DeprecationWarning(), + ) diff --git a/src/specklepy/core/api/host_applications.py b/src/specklepy/core/api/host_applications.py new file mode 100644 index 00000000..0a362832 --- /dev/null +++ b/src/specklepy/core/api/host_applications.py @@ -0,0 +1,116 @@ +from dataclasses import dataclass +from enum import Enum +from unicodedata import name + + +class HostAppVersion(Enum): + v = "v" + v6 = "v6" + v7 = "v7" + v2019 = "v2019" + v2020 = "v2020" + v2021 = "v2021" + v2022 = "v2022" + v2023 = "v2023" + v2024 = "v2024" + v2025 = "v2025" + vSandbox = "vSandbox" + vRevit = "vRevit" + vRevit2021 = "vRevit2021" + vRevit2022 = "vRevit2022" + vRevit2023 = "vRevit2023" + vRevit2024 = "vRevit2024" + vRevit2025 = "vRevit2025" + v25 = "v25" + v26 = "v26" + + def __repr__(self) -> str: + return self.value + + def __str__(self) -> str: + return self.value + + +@dataclass +class HostApplication: + name: str + slug: str + + def get_version(self, version: HostAppVersion) -> str: + return f"{name.replace(' ', '')}{str(version).strip('v')}" + + +RHINO = HostApplication("Rhino", "rhino") +GRASSHOPPER = HostApplication("Grasshopper", "grasshopper") +REVIT = HostApplication("Revit", "revit") +DYNAMO = HostApplication("Dynamo", "dynamo") +UNITY = HostApplication("Unity", "unity") +GSA = HostApplication("GSA", "gsa") +CIVIL = HostApplication("Civil 3D", "civil3d") +AUTOCAD = HostApplication("AutoCAD", "autocad") +MICROSTATION = HostApplication("MicroStation", "microstation") +OPENROADS = HostApplication("OpenRoads", "openroads") +OPENRAIL = HostApplication("OpenRail", "openrail") +OPENBUILDINGS = HostApplication("OpenBuildings", "openbuildings") +ETABS = HostApplication("ETABS", "etabs") +SAP2000 = HostApplication("SAP2000", "sap2000") +CSIBRIDGE = HostApplication("CSIBridge", "csibridge") +SAFE = HostApplication("SAFE", "safe") +TEKLASTRUCTURES = HostApplication("Tekla Structures", "teklastructures") +DXF = HostApplication("DXF Converter", "dxf") +EXCEL = HostApplication("Excel", "excel") +UNREAL = HostApplication("Unreal", "unreal") +POWERBI = HostApplication("Power BI", "powerbi") +BLENDER = HostApplication("Blender", 
"blender") +QGIS = HostApplication("QGIS", "qgis") +ARCGIS = HostApplication("ArcGIS", "arcgis") +SKETCHUP = HostApplication("SketchUp", "sketchup") +ARCHICAD = HostApplication("Archicad", "archicad") +TOPSOLID = HostApplication("TopSolid", "topsolid") +PYTHON = HostApplication("Python", "python") +NET = HostApplication(".NET", "net") +OTHER = HostApplication("Other", "other") + +_app_name_host_app_mapping = { + "dynamo": DYNAMO, + "revit": REVIT, + "autocad": AUTOCAD, + "civil": CIVIL, + "rhino": RHINO, + "grasshopper": GRASSHOPPER, + "unity": UNITY, + "gsa": GSA, + "microstation": MICROSTATION, + "openroads": OPENROADS, + "openrail": OPENRAIL, + "openbuildings": OPENBUILDINGS, + "etabs": ETABS, + "sap": SAP2000, + "csibridge": CSIBRIDGE, + "safe": SAFE, + "teklastructures": TEKLASTRUCTURES, + "dxf": DXF, + "excel": EXCEL, + "unreal": UNREAL, + "powerbi": POWERBI, + "blender": BLENDER, + "qgis": QGIS, + "arcgis": ARCGIS, + "sketchup": SKETCHUP, + "archicad": ARCHICAD, + "topsolid": TOPSOLID, + "python": PYTHON, + "net": NET, +} + + +def get_host_app_from_string(app_name: str) -> HostApplication: + app_name = app_name.lower().replace(" ", "") + for partial_app_name, host_app in _app_name_host_app_mapping.items(): + if partial_app_name in app_name: + return host_app + return HostApplication(app_name, app_name) + + +if __name__ == "__main__": + print(HostAppVersion.v) diff --git a/src/specklepy/core/api/models.py b/src/specklepy/core/api/models.py new file mode 100644 index 00000000..273e55ab --- /dev/null +++ b/src/specklepy/core/api/models.py @@ -0,0 +1,198 @@ +from datetime import datetime +from typing import List, Optional + +from pydantic import BaseModel, Field + + +class Collaborator(BaseModel): + id: Optional[str] = None + name: Optional[str] = None + role: Optional[str] = None + avatar: Optional[str] = None + + +class Commit(BaseModel): + id: Optional[str] = None + message: Optional[str] = None + authorName: Optional[str] = None + authorId: Optional[str] = None + authorAvatar: Optional[str] = None + branchName: Optional[str] = None + createdAt: Optional[datetime] = None + sourceApplication: Optional[str] = None + referencedObject: Optional[str] = None + totalChildrenCount: Optional[int] = None + parents: Optional[List[str]] = None + + def __repr__(self) -> str: + return ( + f"Commit( id: {self.id}, message: {self.message}, referencedObject:" + f" {self.referencedObject}, authorName: {self.authorName}, branchName:" + f" {self.branchName}, createdAt: {self.createdAt} )" + ) + + def __str__(self) -> str: + return self.__repr__() + + +class Commits(BaseModel): + totalCount: Optional[int] = None + cursor: Optional[datetime] = None + items: List[Commit] = [] + + +class Object(BaseModel): + id: Optional[str] = None + speckleType: Optional[str] = None + applicationId: Optional[str] = None + totalChildrenCount: Optional[int] = None + createdAt: Optional[datetime] = None + + +class Branch(BaseModel): + id: Optional[str] = None + name: Optional[str] = None + description: Optional[str] = None + commits: Optional[Commits] = None + + +class Branches(BaseModel): + totalCount: Optional[int] = None + cursor: Optional[datetime] = None + items: List[Branch] = [] + + +class Stream(BaseModel): + id: Optional[str] = None + name: Optional[str] = None + role: Optional[str] = None + isPublic: Optional[bool] = None + description: Optional[str] = None + createdAt: Optional[datetime] = None + updatedAt: Optional[datetime] = None + collaborators: List[Collaborator] = Field(default_factory=list) + branches: 
Optional[Branches] = None + commit: Optional[Commit] = None + object: Optional[Object] = None + commentCount: Optional[int] = None + favoritedDate: Optional[datetime] = None + favoritesCount: Optional[int] = None + + def __repr__(self): + return ( + f"Stream( id: {self.id}, name: {self.name}, description:" + f" {self.description}, isPublic: {self.isPublic})" + ) + + def __str__(self) -> str: + return self.__repr__() + + +class Streams(BaseModel): + totalCount: Optional[int] = None + cursor: Optional[datetime] = None + items: List[Stream] = [] + + +class User(BaseModel): + id: Optional[str] = None + email: Optional[str] = None + name: Optional[str] = None + bio: Optional[str] = None + company: Optional[str] = None + avatar: Optional[str] = None + verified: Optional[bool] = None + role: Optional[str] = None + streams: Optional[Streams] = None + + def __repr__(self): + return ( + f"User( id: {self.id}, name: {self.name}, email: {self.email}, company:" + f" {self.company} )" + ) + + def __str__(self) -> str: + return self.__repr__() + + +class LimitedUser(BaseModel): + """Limited user type, for showing public info about a user to another user.""" + + id: str + name: Optional[str] = None + bio: Optional[str] = None + company: Optional[str] = None + avatar: Optional[str] = None + verified: Optional[bool] = None + role: Optional[str] = None + + +class PendingStreamCollaborator(BaseModel): + id: Optional[str] = None + inviteId: Optional[str] = None + streamId: Optional[str] = None + streamName: Optional[str] = None + title: Optional[str] = None + role: Optional[str] = None + invitedBy: Optional[User] = None + user: Optional[User] = None + token: Optional[str] = None + + def __repr__(self): + return ( + f"PendingStreamCollaborator( inviteId: {self.inviteId}, streamId:" + f" {self.streamId}, role: {self.role}, title: {self.title}, invitedBy:" + f" {self.user.name if self.user else None})" + ) + + def __str__(self) -> str: + return self.__repr__() + + +class Activity(BaseModel): + actionType: Optional[str] = None + info: Optional[dict] = None + userId: Optional[str] = None + streamId: Optional[str] = None + resourceId: Optional[str] = None + resourceType: Optional[str] = None + message: Optional[str] = None + time: Optional[datetime] = None + + def __repr__(self) -> str: + return ( + f"Activity( streamId: {self.streamId}, actionType: {self.actionType}," + f" message: {self.message}, userId: {self.userId} )" + ) + + def __str__(self) -> str: + return self.__repr__() + + +class ActivityCollection(BaseModel): + totalCount: Optional[int] = None + items: Optional[List[Activity]] = None + cursor: Optional[datetime] = None + + def __repr__(self) -> str: + return ( + f"ActivityCollection( totalCount: {self.totalCount}, items:" + f" {len(self.items) if self.items else 0}, cursor:" + f" {self.cursor.isoformat() if self.cursor else None} )" + ) + + def __str__(self) -> str: + return self.__repr__() + + +class ServerInfo(BaseModel): + name: Optional[str] = None + company: Optional[str] = None + url: Optional[str] = None + description: Optional[str] = None + adminContact: Optional[str] = None + canonicalUrl: Optional[str] = None + roles: Optional[List[dict]] = None + scopes: Optional[List[dict]] = None + authStrategies: Optional[List[dict]] = None + version: Optional[str] = None diff --git a/src/specklepy/core/api/operations.py b/src/specklepy/core/api/operations.py new file mode 100644 index 00000000..a09bc48d --- /dev/null +++ b/src/specklepy/core/api/operations.py @@ -0,0 +1,139 @@ +from typing import List, 
Optional + +#from specklepy.logging import metrics +from specklepy.logging.exceptions import SpeckleException +from specklepy.objects.base import Base +from specklepy.serialization.base_object_serializer import BaseObjectSerializer +from specklepy.transports.abstract_transport import AbstractTransport +from specklepy.transports.sqlite import SQLiteTransport + + +def send( + base: Base, + transports: Optional[List[AbstractTransport]] = None, + use_default_cache: bool = True, +): + """Sends an object via the provided transports. Defaults to the local cache. + + Arguments: + obj {Base} -- the object you want to send + transports {list} -- where you want to send them + use_default_cache {bool} -- toggle for the default cache. + If set to false, it will only send to the provided transports + + Returns: + str -- the object id of the sent object + """ + + if not transports and not use_default_cache: + raise SpeckleException( + message=( + "You need to provide at least one transport: cannot send with an empty" + " transport list and no default cache" + ) + ) + + if isinstance(transports, AbstractTransport): + transports = [transports] + + if transports is None: + transports = [] + + if use_default_cache: + transports.insert(0, SQLiteTransport()) + + serializer = BaseObjectSerializer(write_transports=transports) + + obj_hash, _ = serializer.write_json(base=base) + + return obj_hash + + +def receive( + obj_id: str, + remote_transport: Optional[AbstractTransport] = None, + local_transport: Optional[AbstractTransport] = None, +) -> Base: + """Receives an object from a transport. + + Arguments: + obj_id {str} -- the id of the object to receive + remote_transport {Transport} -- the transport to receive from + local_transport {Transport} -- the local cache to check for existing objects + (defaults to `SQLiteTransport`) + + Returns: + Base -- the base object + """ + if not local_transport: + local_transport = SQLiteTransport() + + serializer = BaseObjectSerializer(read_transport=local_transport) + + # try local transport first. if the parent is there, we assume all the children are there and continue with deserialization using the local transport + obj_string = local_transport.get_object(obj_id) + if obj_string: + return serializer.read_json(obj_string=obj_string) + + if not remote_transport: + raise SpeckleException( + message=( + "Could not find the specified object using the local transport, and you" + " didn't provide a fallback remote from which to pull it." + ) + ) + + obj_string = remote_transport.copy_object_and_children( + id=obj_id, target_transport=local_transport + ) + + return serializer.read_json(obj_string=obj_string) + + +def serialize(base: Base, write_transports: List[AbstractTransport] = []) -> str: + """ + Serialize a base object. If no write transports are provided, + the object will be serialized + without detaching or chunking any of the attributes. + + Arguments: + base {Base} -- the object to serialize + write_transports {List[AbstractTransport]} + -- optional: the transports to write to + + Returns: + str -- the serialized object + """ + serializer = BaseObjectSerializer(write_transports=write_transports) + + return serializer.write_json(base)[1] + + +def deserialize( + obj_string: str, read_transport: Optional[AbstractTransport] = None +) -> Base: + """ + Deserialize a string object into a Base object. 
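+    A quick round-trip sketch (editor's illustrative example; the dynamic
+    attribute name is arbitrary):
+
+        from specklepy.core.api.operations import serialize, deserialize
+        from specklepy.objects.base import Base
+
+        original = Base()
+        original.name = "example"           # dynamic attribute, nothing is detached
+        obj_string = serialize(original)    # serialize without any write transports
+        restored = deserialize(obj_string)  # falls back to the default SQLiteTransport for any detached children
+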
+ + If the object contains referenced child objects that are not stored in the local db, + a read transport needs to be provided in order to recompose + the base with the children objects. + + Arguments: + obj_string {str} -- the string object to deserialize + read_transport {AbstractTransport} + -- the transport to fetch children objects from + (defaults to SQLiteTransport) + + Returns: + Base -- the deserialized object + """ + if not read_transport: + read_transport = SQLiteTransport() + + serializer = BaseObjectSerializer(read_transport=read_transport) + + return serializer.read_json(obj_string=obj_string) + + +__all__ = ["receive", "send", "serialize", "deserialize"] diff --git a/src/specklepy/core/api/resource.py b/src/specklepy/core/api/resource.py new file mode 100644 index 00000000..acefc639 --- /dev/null +++ b/src/specklepy/core/api/resource.py @@ -0,0 +1,131 @@ +from threading import Lock +from typing import Any, Dict, List, Optional, Tuple, Type, Union + +from gql.client import Client +from gql.transport.exceptions import TransportQueryError +from graphql import DocumentNode + +from specklepy.core.api.credentials import Account +from specklepy.logging.exceptions import ( + GraphQLException, + SpeckleException, + UnsupportedException, +) +from specklepy.serialization.base_object_serializer import BaseObjectSerializer +from specklepy.transports.sqlite import SQLiteTransport + + +class ResourceBase(object): + def __init__( + self, + account: Account, + basepath: str, + client: Client, + name: str, + server_version: Optional[Tuple[Any, ...]] = None, + ) -> None: + self.account = account + self.basepath = basepath + self.client = client + self.name = name + self.server_version = server_version + self.schema: Optional[Type] = None + self.__lock = Lock() + + def _step_into_response(self, response: dict, return_type: Union[str, List, None]): + """Step into the dict to get the relevant data""" + if return_type is None: + return response + if isinstance(return_type, str): + return response[return_type] + if isinstance(return_type, List): + for key in return_type: + response = response[key] + return response + + def _parse_response(self, response: Union[dict, list, None], schema=None): + """Try to create a class instance from the response""" + if response is None: + return None + if isinstance(response, list): + return [self._parse_response(response=r, schema=schema) for r in response] + if schema: + return schema.model_validate(response) + elif self.schema: + try: + return self.schema.model_validate(response) + except Exception: + s = BaseObjectSerializer(read_transport=SQLiteTransport()) + return s.recompose_base(response) + else: + return response + + def make_request( + self, + query: DocumentNode, + params: Optional[Dict] = None, + return_type: Union[str, List, None] = None, + schema=None, + parse_response: bool = True, + ) -> Any: + """Executes the GraphQL query""" + try: + with self.__lock: + response = self.client.execute(query, variable_values=params) + except Exception as ex: + if isinstance(ex, TransportQueryError): + return GraphQLException( + message=( + f"Failed to execute the GraphQL {self.name} request. Errors:" + f" {ex.errors}" + ), + errors=ex.errors, + data=ex.data, + ) + else: + return SpeckleException( + message=( + f"Failed to execute the GraphQL {self.name} request. 
Inner" + f" exception: {ex}" + ), + exception=ex, + ) + + response = self._step_into_response(response=response, return_type=return_type) + + if parse_response: + return self._parse_response(response=response, schema=schema) + else: + return response + + def _check_server_version_at_least( + self, target_version: Tuple[Any, ...], unsupported_message: Optional[str] = None + ): + """Use this check to guard against making unsupported requests on older servers. + + Arguments: + target_version {tuple} + the minimum server version in the format (major, minor, patch, (tag, build)) + eg (2, 6, 3) for a stable build and (2, 6, 4, 'alpha', 4711) for alpha + """ + if not unsupported_message: + unsupported_message = ( + "The client method used is not supported on Speckle Server versions" + f" prior to v{'.'.join(target_version)}" + ) + # if version is dev, it should be supported... (or not) + if self.server_version == ("dev",): + return + if self.server_version and self.server_version < target_version: + raise UnsupportedException(unsupported_message) + + def _check_invites_supported(self): + """Invites are only supported for Speckle Server >= 2.6.4. + Use this check to guard against making unsupported requests on older servers. + """ + self._check_server_version_at_least( + (2, 6, 4), + "Stream invites are only supported as of Speckle Server v2.6.4. Please" + " update your Speckle Server to use this method or use the" + " `grant_permission` flow instead.", + ) diff --git a/src/specklepy/core/api/resources/__init__.py b/src/specklepy/core/api/resources/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/specklepy/core/api/resources/active_user.py b/src/specklepy/core/api/resources/active_user.py new file mode 100644 index 00000000..26616e59 --- /dev/null +++ b/src/specklepy/core/api/resources/active_user.py @@ -0,0 +1,264 @@ +from datetime import datetime, timezone +from typing import List, Optional + +from gql import gql + +from specklepy.core.api.models import ActivityCollection, PendingStreamCollaborator, User +from specklepy.core.api.resource import ResourceBase +from specklepy.logging.exceptions import SpeckleException + +NAME = "active_user" + + +class Resource(ResourceBase): + """API Access class for users""" + + def __init__(self, account, basepath, client, server_version) -> None: + super().__init__( + account=account, + basepath=basepath, + client=client, + name=NAME, + server_version=server_version, + ) + self.schema = User + + def get(self) -> User: + """Gets the profile of a user. If no id argument is provided, + will return the current authenticated user's profile + (as extracted from the authorization header). + + Arguments: + id {str} -- the user id + + Returns: + User -- the retrieved user + """ + query = gql( + """ + query User { + activeUser { + id + email + name + bio + company + avatar + verified + profiles + role + } + } + """ + ) + + params = {} + + return self.make_request(query=query, params=params, return_type="activeUser") + + def update( + self, + name: Optional[str] = None, + company: Optional[str] = None, + bio: Optional[str] = None, + avatar: Optional[str] = None, + ): + """Updates your user profile. All arguments are optional. 
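+        Example (editor's illustrative sketch; assumes an authenticated
+        `SpeckleClient` instance named `client`):
+
+            client.active_user.update(name="Ada Lovelace", company="Analytical Engines Ltd")
+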
+ + Arguments: + name {str} -- your name + company {str} -- the company you may or may not work for + bio {str} -- tell us about yourself + avatar {str} -- a nice photo of yourself + + Returns @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT): + bool -- True if your profile was updated successfully + """ + query = gql( + """ + mutation UserUpdate($user: UserUpdateInput!) { + userUpdate(user: $user) + } + """ + ) + params = {"name": name, "company": company, "bio": bio, "avatar": avatar} + + params = {"user": {k: v for k, v in params.items() if v is not None}} + + if not params["user"]: + return SpeckleException( + message=( + "You must provide at least one field to update your user profile" + ) + ) + + return self.make_request( + query=query, params=params, return_type="userUpdate", parse_response=False + ) + + def activity( + self, + limit: int = 20, + action_type: Optional[str] = None, + before: Optional[datetime] = None, + after: Optional[datetime] = None, + cursor: Optional[datetime] = None, + ): + """ + Get the activity from a given stream in an Activity collection. + Step into the activity `items` for the list of activity. + If no id argument is provided, will return the current authenticated user's + activity (as extracted from the authorization header). + + Note: all timestamps arguments should be `datetime` of any tz as they will be + converted to UTC ISO format strings + + user_id {str} -- the id of the user to get the activity from + action_type {str} -- filter results to a single action type + (eg: `commit_create` or `commit_receive`) + limit {int} -- max number of Activity items to return + before {datetime} -- latest cutoff for activity + (ie: return all activity _before_ this time) + after {datetime} -- oldest cutoff for activity + (ie: return all activity _after_ this time) + cursor {datetime} -- timestamp cursor for pagination + """ + + query = gql( + """ + query UserActivity( + $action_type: String, + $before:DateTime, + $after: DateTime, + $cursor: DateTime, + $limit: Int + ){ + activeUser { + activity( + actionType: $action_type, + before: $before, + after: $after, + cursor: $cursor, + limit: $limit + ) { + totalCount + cursor + items { + actionType + info + userId + streamId + resourceId + resourceType + message + time + } + } + } + } + """ + ) + + params = { + "limit": limit, + "action_type": action_type, + "before": before.astimezone(timezone.utc).isoformat() if before else before, + "after": after.astimezone(timezone.utc).isoformat() if after else after, + "cursor": cursor.astimezone(timezone.utc).isoformat() if cursor else cursor, + } + + return self.make_request( + query=query, + params=params, + return_type=["activeUser", "activity"], + schema=ActivityCollection, + ) + + def get_all_pending_invites(self) -> List[PendingStreamCollaborator]: + """Get all of the active user's pending stream invites + + Requires Speckle Server version >= 2.6.4 + + Returns: + List[PendingStreamCollaborator] + -- a list of pending invites for the current user + """ + self._check_invites_supported() + + query = gql( + """ + query StreamInvites { + streamInvites{ + id + token + inviteId + streamId + streamName + title + role + invitedBy { + id + name + company + avatar + } + } + } + """ + ) + + return self.make_request( + query=query, + return_type="streamInvites", + schema=PendingStreamCollaborator, + ) + + def get_pending_invite( + self, stream_id: str, token: Optional[str] = None + ) -> Optional[PendingStreamCollaborator]: + """Get a particular pending invite for the 
active user on a given stream. + If no invite_id is provided, any valid invite will be returned. + + Requires Speckle Server version >= 2.6.4 + + Arguments: + stream_id {str} -- the id of the stream to look for invites on + token {str} -- the token of the invite to look for (optional) + + Returns: + PendingStreamCollaborator + -- the invite for the given stream (or None if it isn't found) + """ + self._check_invites_supported() + + query = gql( + """ + query StreamInvite($streamId: String!, $token: String) { + streamInvite(streamId: $streamId, token: $token) { + id + token + streamId + streamName + title + role + invitedBy { + id + name + company + avatar + } + } + } + """ + ) + + params = {"streamId": stream_id} + if token: + params["token"] = token + + return self.make_request( + query=query, + params=params, + return_type="streamInvite", + schema=PendingStreamCollaborator, + ) diff --git a/src/specklepy/core/api/resources/branch.py b/src/specklepy/core/api/resources/branch.py new file mode 100644 index 00000000..94f6d294 --- /dev/null +++ b/src/specklepy/core/api/resources/branch.py @@ -0,0 +1,219 @@ +from typing import Optional + +from gql import gql + +from specklepy.core.api.models import Branch +from specklepy.core.api.resource import ResourceBase + +NAME = "branch" + + +class Resource(ResourceBase): + """API Access class for branches""" + + def __init__(self, account, basepath, client) -> None: + super().__init__( + account=account, + basepath=basepath, + client=client, + name=NAME, + ) + self.schema = Branch + + def create( + self, stream_id: str, name: str, description: str = "No description provided" + ) -> str: + """Create a new branch on this stream + + Arguments: + name {str} -- the name of the new branch + description {str} -- a short description of the branch + + Returns: + id {str} -- the newly created branch's id + """ + query = gql( + """ + mutation BranchCreate($branch: BranchCreateInput!) { + branchCreate(branch: $branch) + } + """ + ) + params = { + "branch": { + "streamId": stream_id, + "name": name, + "description": description, + } + } + + return self.make_request( + query=query, params=params, return_type="branchCreate", parse_response=False + ) + + def get(self, stream_id: str, name: str, commits_limit: int = 10): + """Get a branch by name from a stream + + Arguments: + stream_id {str} -- the id of the stream to get the branch from + name {str} -- the name of the branch to get + commits_limit {int} -- maximum number of commits to get + + Returns: + Branch -- the fetched branch with its latest commits + """ + query = gql( + """ + query BranchGet($stream_id: String!, $name: String!, $commits_limit: Int!) 
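+            # Example variables (illustrative): {"stream_id": "3073b96e86", "name": "main", "commits_limit": 10}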
{ + stream(id: $stream_id) { + branch(name: $name) { + id, + name, + description, + commits (limit: $commits_limit) { + totalCount, + cursor, + items { + id, + referencedObject, + sourceApplication, + totalChildrenCount, + message, + authorName, + authorId, + branchName, + parents, + createdAt + } + } + } + } + } + """ + ) + + params = {"stream_id": stream_id, "name": name, "commits_limit": commits_limit} + + return self.make_request( + query=query, params=params, return_type=["stream", "branch"] + ) + + def list(self, stream_id: str, branches_limit: int = 10, commits_limit: int = 10): + """Get a list of branches from a given stream + + Arguments: + stream_id {str} -- the id of the stream to get the branches from + branches_limit {int} -- maximum number of branches to get + commits_limit {int} -- maximum number of commits to get + + Returns: + List[Branch] -- the branches on the stream + """ + query = gql( + """ + query BranchesGet( + $stream_id: String!, + $branches_limit: Int!, + $commits_limit: Int! + ) { + stream(id: $stream_id) { + branches(limit: $branches_limit) { + items { + id + name + description + commits(limit: $commits_limit) { + totalCount + items{ + id + message + referencedObject + sourceApplication + parents + authorId + authorName + branchName + createdAt + } + } + } + } + } + } + """ + ) + + params = { + "stream_id": stream_id, + "branches_limit": branches_limit, + "commits_limit": commits_limit, + } + + return self.make_request( + query=query, params=params, return_type=["stream", "branches", "items"] + ) + + def update( + self, + stream_id: str, + branch_id: str, + name: Optional[str] = None, + description: Optional[str] = None, + ): + """Update a branch + + Arguments: + stream_id {str} -- the id of the stream containing the branch to update + branch_id {str} -- the id of the branch to update + name {str} -- optional: the updated branch name + description {str} -- optional: the updated branch description + + Returns: + bool -- True if update is successful + """ + query = gql( + """ + mutation BranchUpdate($branch: BranchUpdateInput!) { + branchUpdate(branch: $branch) + } + """ + ) + params = { + "branch": { + "streamId": stream_id, + "id": branch_id, + } + } + + if name: + params["branch"]["name"] = name + if description: + params["branch"]["description"] = description + + return self.make_request( + query=query, params=params, return_type="branchUpdate", parse_response=False + ) + + def delete(self, stream_id: str, branch_id: str): + """Delete a branch + + Arguments: + stream_id {str} -- the id of the stream containing the branch to delete + branch_id {str} -- the branch to delete + + Returns: + bool -- True if deletion is successful + """ + query = gql( + """ + mutation BranchDelete($branch: BranchDeleteInput!) 
{ + branchDelete(branch: $branch) + } + """ + ) + + params = {"branch": {"streamId": stream_id, "id": branch_id}} + + return self.make_request( + query=query, params=params, return_type="branchDelete", parse_response=False + ) diff --git a/src/specklepy/core/api/resources/commit.py b/src/specklepy/core/api/resources/commit.py new file mode 100644 index 00000000..0f691515 --- /dev/null +++ b/src/specklepy/core/api/resources/commit.py @@ -0,0 +1,237 @@ +from typing import List, Optional + +from gql import gql + +from specklepy.core.api.models import Commit +from specklepy.core.api.resource import ResourceBase + +NAME = "commit" + + +class Resource(ResourceBase): + """API Access class for commits""" + + def __init__(self, account, basepath, client) -> None: + super().__init__( + account=account, + basepath=basepath, + client=client, + name=NAME, + ) + self.schema = Commit + + def get(self, stream_id: str, commit_id: str) -> Commit: + """ + Gets a commit given a stream and the commit id + + Arguments: + stream_id {str} -- the stream where we can find the commit + commit_id {str} -- the id of the commit you want to get + + Returns: + Commit -- the retrieved commit object + """ + query = gql( + """ + query Commit($stream_id: String!, $commit_id: String!) { + stream(id: $stream_id) { + commit(id: $commit_id) { + id + message + referencedObject + authorId + authorName + authorAvatar + branchName + createdAt + sourceApplication + totalChildrenCount + parents + } + } + } + """ + ) + params = {"stream_id": stream_id, "commit_id": commit_id} + + return self.make_request( + query=query, params=params, return_type=["stream", "commit"] + ) + + def list(self, stream_id: str, limit: int = 10) -> List[Commit]: + """ + Get a list of commits on a given stream + + Arguments: + stream_id {str} -- the stream where the commits are + limit {int} -- the maximum number of commits to fetch (default = 10) + + Returns: + List[Commit] -- a list of the most recent commit objects + """ + query = gql( + """ + query Commits($stream_id: String!, $limit: Int!) { + stream(id: $stream_id) { + commits(limit: $limit) { + items { + id + message + referencedObject + authorName + authorId + authorName + authorAvatar + branchName + createdAt + sourceApplication + totalChildrenCount + parents + } + } + } + } + """ + ) + params = {"stream_id": stream_id, "limit": limit} + + return self.make_request( + query=query, params=params, return_type=["stream", "commits", "items"] + ) + + def create( + self, + stream_id: str, + object_id: str, + branch_name: str = "main", + message: str = "", + source_application: str = "python", + parents: List[str] = None, + ) -> str: + """ + Creates a commit on a branch + + Arguments: + stream_id {str} -- the stream you want to commit to + object_id {str} -- the hash of your commit object + branch_name {str} + -- the name of the branch to commit to (defaults to "main") + message {str} + -- optional: a message to give more information about the commit + source_application{str} + -- optional: the application from which the commit was created + (defaults to "python") + parents {List[str]} -- optional: the id of the parent commits + + Returns: + str -- the id of the created commit + """ + query = gql( + """ + mutation CommitCreate ($commit: CommitCreateInput!) 
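+            # Example $commit variable (illustrative): {"streamId": "3073b96e86", "branchName": "main",
+            #   "objectId": "<hash returned by operations.send>", "message": "first commit", "sourceApplication": "python"}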
+ { commitCreate(commit: $commit)} + """ + ) + params = { + "commit": { + "streamId": stream_id, + "branchName": branch_name, + "objectId": object_id, + "message": message, + "sourceApplication": source_application, + } + } + if parents: + params["commit"]["parents"] = parents + + return self.make_request( + query=query, params=params, return_type="commitCreate", parse_response=False + ) + + def update(self, stream_id: str, commit_id: str, message: str) -> bool: + """ + Update a commit + + Arguments: + stream_id {str} + -- the id of the stream that contains the commit you'd like to update + commit_id {str} -- the id of the commit you'd like to update + message {str} -- the updated commit message + + Returns: + bool -- True if the operation succeeded + """ + query = gql( + """ + mutation CommitUpdate($commit: CommitUpdateInput!) + { commitUpdate(commit: $commit)} + """ + ) + params = { + "commit": {"streamId": stream_id, "id": commit_id, "message": message} + } + + return self.make_request( + query=query, params=params, return_type="commitUpdate", parse_response=False + ) + + def delete(self, stream_id: str, commit_id: str) -> bool: + """ + Delete a commit + + Arguments: + stream_id {str} + -- the id of the stream that contains the commit you'd like to delete + commit_id {str} -- the id of the commit you'd like to delete + + Returns: + bool -- True if the operation succeeded + """ + query = gql( + """ + mutation CommitDelete($commit: CommitDeleteInput!) + { commitDelete(commit: $commit)} + """ + ) + params = {"commit": {"streamId": stream_id, "id": commit_id}} + + return self.make_request( + query=query, params=params, return_type="commitDelete", parse_response=False + ) + + def received( + self, + stream_id: str, + commit_id: str, + source_application: str = "python", + message: Optional[str] = None, + ) -> bool: + """ + Mark a commit object a received by the source application. + """ + query = gql( + """ + mutation CommitReceive($receivedInput:CommitReceivedInput!){ + commitReceive(input:$receivedInput) + } + """ + ) + params = { + "receivedInput": { + "sourceApplication": source_application, + "streamId": stream_id, + "commitId": commit_id, + "message": "message", + } + } + + try: + return self.make_request( + query=query, + params=params, + return_type="commitReceive", + parse_response=False, + ) + except Exception as ex: + print(ex.with_traceback) + return False diff --git a/src/specklepy/core/api/resources/object.py b/src/specklepy/core/api/resources/object.py new file mode 100644 index 00000000..1d3369ea --- /dev/null +++ b/src/specklepy/core/api/resources/object.py @@ -0,0 +1,92 @@ +from typing import Dict, List + +from gql import gql + +from specklepy.core.api.resource import ResourceBase +from specklepy.objects.base import Base + +NAME = "object" + + +class Resource(ResourceBase): + """API Access class for objects""" + + def __init__(self, account, basepath, client) -> None: + super().__init__( + account=account, + basepath=basepath, + client=client, + name=NAME, + ) + self.schema = Base + + def get(self, stream_id: str, object_id: str) -> Base: + """ + Get a stream object + + Arguments: + stream_id {str} -- the id of the stream for the object + object_id {str} -- the hash of the object you want to get + + Returns: + Base -- the returned Base object + """ + query = gql( + """ + query Object($stream_id: String!, $object_id: String!) 
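+            # Example variables (illustrative): {"stream_id": "3073b96e86", "object_id": "<object hash>"}
+            # note: only the nested `data` dict of the response is returned by this resource method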
{ + stream(id: $stream_id) { + id + name + object(id: $object_id) { + id + speckleType + applicationId + createdAt + totalChildrenCount + data + } + } + } + """ + ) + params = {"stream_id": stream_id, "object_id": object_id} + + return self.make_request( + query=query, + params=params, + return_type=["stream", "object", "data"], + ) + + def create(self, stream_id: str, objects: List[Dict]) -> str: + """ + Not advised - generally, you want to use `operations.send()`. + + Create a new object on a stream. + To send a base object, you can prepare it by running it through the + `BaseObjectSerializer.traverse_base()` function to get a valid (serialisable) + object to send. + + NOTE: this does not create a commit - you can create one with + `SpeckleClient.commit.create`. + Dynamic fields will be located in the 'data' dict of the received `Base` object + + Arguments: + stream_id {str} -- the id of the stream you want to send the object to + objects {List[Dict]} + -- a list of base dictionary objects (NOTE: must be json serialisable) + + Returns: + str -- the id of the object + """ + query = gql( + """ + mutation ObjectCreate($object_input: ObjectCreateInput!) { + objectCreate(objectInput: $object_input) + } + """ + ) + params = {"object_input": {"streamId": stream_id, "objects": objects}} + + return self.make_request( + query=query, params=params, return_type="objectCreate", parse_response=False + ) diff --git a/src/specklepy/core/api/resources/other_user.py b/src/specklepy/core/api/resources/other_user.py new file mode 100644 index 00000000..d39187dd --- /dev/null +++ b/src/specklepy/core/api/resources/other_user.py @@ -0,0 +1,172 @@ +from datetime import datetime, timezone +from typing import List, Optional, Union + +from gql import gql + +from specklepy.core.api.models import ActivityCollection, LimitedUser +from specklepy.core.api.resource import ResourceBase +from specklepy.logging.exceptions import SpeckleException + +NAME = "other_user" + + +class Resource(ResourceBase): + """API Access class for other users, that are not the currently active user.""" + + def __init__(self, account, basepath, client, server_version) -> None: + super().__init__( + account=account, + basepath=basepath, + client=client, + name=NAME, + server_version=server_version, + ) + self.schema = LimitedUser + + def get(self, id: str) -> LimitedUser: + """ + Gets the profile of another user. + + Arguments: + id {str} -- the user id + + Returns: + LimitedUser -- the retrieved profile of another user + """ + query = gql( + """ + query OtherUser($id: String!) { + otherUser(id: $id) { + id + name + bio + company + avatar + verified + role + } + } + """ + ) + + params = {"id": id} + + return self.make_request(query=query, params=params, return_type="otherUser") + + def search( + self, search_query: str, limit: int = 25 + ) -> Union[List[LimitedUser], SpeckleException]: + """Searches for user by name or email. The search query must be at least + 3 characters long + + Arguments: + search_query {str} -- a string to search for + limit {int} -- the maximum number of results to return + Returns: + List[LimitedUser] -- a list of User objects that match the search query + """ + if len(search_query) < 3: + return SpeckleException( + message="User search query must be at least 3 characters" + ) + + query = gql( + """ + query UserSearch($search_query: String!, $limit: Int!) 
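+            # Example variables (illustrative): {"search_query": "ada", "limit": 25}
+            # note: the calling method rejects search queries shorter than 3 characters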
{ + userSearch(query: $search_query, limit: $limit) { + items { + id + name + bio + company + avatar + verified + } + } + } + """ + ) + params = {"search_query": search_query, "limit": limit} + + return self.make_request( + query=query, params=params, return_type=["userSearch", "items"] + ) + + def activity( + self, + user_id: str, + limit: int = 20, + action_type: Optional[str] = None, + before: Optional[datetime] = None, + after: Optional[datetime] = None, + cursor: Optional[datetime] = None, + ) -> ActivityCollection: + """ + Get the activity from a given stream in an Activity collection. + Step into the activity `items` for the list of activity. + + Note: all timestamps arguments should be `datetime` of + any tz as they will be converted to UTC ISO format strings + + user_id {str} -- the id of the user to get the activity from + action_type {str} -- filter results to a single action type + (eg: `commit_create` or `commit_receive`) + limit {int} -- max number of Activity items to return + before {datetime} -- latest cutoff for activity + (ie: return all activity _before_ this time) + after {datetime} -- oldest cutoff for activity + (ie: return all activity _after_ this time) + cursor {datetime} -- timestamp cursor for pagination + """ + + query = gql( + """ + query UserActivity( + $user_id: String!, + $action_type: String, + $before:DateTime, + $after: DateTime, + $cursor: DateTime, + $limit: Int + ){ + otherUser(id: $user_id) { + activity( + actionType: $action_type, + before: $before, + after: $after, + cursor: $cursor, + limit: $limit + ) { + totalCount + cursor + items { + actionType + info + userId + streamId + resourceId + resourceType + message + time + } + } + } + } + """ + ) + + params = { + "user_id": user_id, + "limit": limit, + "action_type": action_type, + "before": before.astimezone(timezone.utc).isoformat() if before else before, + "after": after.astimezone(timezone.utc).isoformat() if after else after, + "cursor": cursor.astimezone(timezone.utc).isoformat() if cursor else cursor, + } + + return self.make_request( + query=query, + params=params, + return_type=["otherUser", "activity"], + schema=ActivityCollection, + ) diff --git a/src/specklepy/core/api/resources/server.py b/src/specklepy/core/api/resources/server.py new file mode 100644 index 00000000..92e57ed3 --- /dev/null +++ b/src/specklepy/core/api/resources/server.py @@ -0,0 +1,174 @@ +import re +from typing import Any, Dict, List, Tuple + +from gql import gql + +from specklepy.core.api.models import ServerInfo +from specklepy.core.api.resource import ResourceBase +from specklepy.logging.exceptions import GraphQLException + +NAME = "server" + + +class Resource(ResourceBase): + """API Access class for the server""" + + def __init__(self, account, basepath, client) -> None: + super().__init__( + account=account, + basepath=basepath, + client=client, + name=NAME, + ) + + def get(self) -> ServerInfo: + """Get the server info + + Returns: + dict -- the server info in dictionary form + """ + query = gql( + """ + query Server { + serverInfo { + name + company + description + adminContact + canonicalUrl + version + roles { + name + description + resourceTarget + } + scopes { + name + description + } + authStrategies{ + id + name + icon + } + } + } + """ + ) + + return self.make_request( + query=query, return_type="serverInfo", schema=ServerInfo + ) + + def version(self) -> Tuple[Any, ...]: + """Get the server version + + Returns: + the server version in the format (major, minor, patch, (tag, build)) + eg (2, 6, 3) for a 
stable build and (2, 6, 4, 'alpha', 4711) for alpha + """ + # not tracking as it will be called along with other mutations / queries as a check + query = gql( + """ + query Server { + serverInfo { + version + } + } + """ + ) + ver = self.make_request( + query=query, return_type=["serverInfo", "version"], parse_response=False + ) + if isinstance(ver, Exception): + raise GraphQLException( + f"Could not get server version for {self.basepath}", [ver] + ) + + # pylint: disable=consider-using-generator; (list comp is faster) + return tuple( + [ + int(segment) if segment.isdigit() else segment + for segment in re.split(r"\.|-", ver) + ] + ) + + def apps(self) -> Dict: + """Get the apps registered on the server + + Returns: + dict -- a dictionary of apps registered on the server + """ + query = gql( + """ + query Apps { + apps{ + id + name + description + termsAndConditionsLink + trustByDefault + logo + author { + id + name + avatar + } + } + } + """ + ) + + return self.make_request(query=query, return_type="apps", parse_response=False) + + def create_token(self, name: str, scopes: List[str], lifespan: int) -> str: + """Create a personal API token + + Arguments: + scopes {List[str]} -- the scopes to grant with this token + name {str} -- a name for your new token + lifespan {int} -- duration before the token expires + + Returns: + str -- the new API token. note: this is the only time you'll see the token! + """ + query = gql( + """ + mutation TokenCreate($token: ApiTokenCreateInput!) { + apiTokenCreate(token: $token) + } + """ + ) + params = {"token": {"scopes": scopes, "name": name, "lifespan": lifespan}} + + return self.make_request( + query=query, + params=params, + return_type="apiTokenCreate", + parse_response=False, + ) + + def revoke_token(self, token: str) -> bool: + """Revokes (deletes) a personal API token + + Arguments: + token {str} -- the token to revoke (delete) + + Returns: + bool -- True if the token was successfully deleted + """ + query = gql( + """ + mutation TokenRevoke($token: String!) 
{ + apiTokenRevoke(token: $token) + } + """ + ) + params = {"token": token} + + return self.make_request( + query=query, + params=params, + return_type="apiTokenRevoke", + parse_response=False, + ) diff --git a/src/specklepy/core/api/resources/stream.py b/src/specklepy/core/api/resources/stream.py new file mode 100644 index 00000000..40b866bd --- /dev/null +++ b/src/specklepy/core/api/resources/stream.py @@ -0,0 +1,751 @@ +from datetime import datetime, timezone +from typing import List, Optional + +from deprecated import deprecated +from gql import gql + +from specklepy.core.api.models import ActivityCollection, PendingStreamCollaborator, Stream +from specklepy.core.api.resource import ResourceBase +from specklepy.logging.exceptions import SpeckleException, UnsupportedException + +NAME = "stream" + + +class Resource(ResourceBase): + """API Access class for streams""" + + def __init__(self, account, basepath, client, server_version) -> None: + super().__init__( + account=account, + basepath=basepath, + client=client, + name=NAME, + server_version=server_version, + ) + + self.schema = Stream + + def get(self, id: str, branch_limit: int = 10, commit_limit: int = 10) -> Stream: + """Get the specified stream from the server + + Arguments: + id {str} -- the stream id + branch_limit {int} -- the maximum number of branches to return + commit_limit {int} -- the maximum number of commits to return + + Returns: + Stream -- the retrieved stream + """ + query = gql( + """ + query Stream($id: String!, $branch_limit: Int!, $commit_limit: Int!) { + stream(id: $id) { + id + name + role + description + isPublic + createdAt + updatedAt + commentCount + favoritesCount + collaborators { + id + name + role + avatar + } + branches(limit: $branch_limit) { + totalCount + cursor + items { + id + name + description + commits(limit: $commit_limit) { + totalCount + cursor + items { + id + message + authorId + createdAt + authorName + referencedObject + sourceApplication + } + } + } + } + } + } + """ + ) + + params = {"id": id, "branch_limit": branch_limit, "commit_limit": commit_limit} + + return self.make_request(query=query, params=params, return_type="stream") + + def list(self, stream_limit: int = 10) -> List[Stream]: + """Get a list of the user's streams + + Arguments: + stream_limit {int} -- The maximum number of streams to return + + Returns: + List[Stream] -- A list of Stream objects + """ + query = gql( + """ + query User($stream_limit: Int!) { + user { + id + bio + name + email + avatar + company + verified + profiles + role + streams(limit: $stream_limit) { + totalCount + cursor + items { + id + name + role + isPublic + createdAt + updatedAt + description + commentCount + favoritesCount + collaborators { + id + name + role + } + } + } + } + } + """ + ) + + params = {"stream_limit": stream_limit} + + return self.make_request( + query=query, params=params, return_type=["user", "streams", "items"] + ) + + def create( + self, + name: str = "Anonymous Python Stream", + description: str = "No description provided", + is_public: bool = True, + ) -> str: + """Create a new stream + + Arguments: + name {str} -- the name of the string + description {str} -- a short description of the stream + is_public {bool} + -- whether or not the stream can be viewed by anyone with the id + + Returns: + id {str} -- the id of the newly created stream + """ + query = gql( + """ + mutation StreamCreate($stream: StreamCreateInput!) 
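+            # Example $stream variable (illustrative): {"name": "my first stream",
+            #   "description": "a stream created from specklepy", "isPublic": true}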
{ + streamCreate(stream: $stream) + } + """ + ) + + params = { + "stream": {"name": name, "description": description, "isPublic": is_public} + } + + return self.make_request( + query=query, params=params, return_type="streamCreate", parse_response=False + ) + + def update( + self, + id: str, + name: Optional[str] = None, + description: Optional[str] = None, + is_public: Optional[bool] = None, + ) -> bool: + """Update an existing stream + + Arguments: + id {str} -- the id of the stream to be updated + name {str} -- the name of the string + description {str} -- a short description of the stream + is_public {bool} + -- whether or not the stream can be viewed by anyone with the id + + Returns: + bool -- whether the stream update was successful + """ + query = gql( + """ + mutation StreamUpdate($stream: StreamUpdateInput!) { + streamUpdate(stream: $stream) + } + """ + ) + + params = { + "id": id, + "name": name, + "description": description, + "isPublic": is_public, + } + # remove None values so graphql doesn't cry + params = {"stream": {k: v for k, v in params.items() if v is not None}} + + return self.make_request( + query=query, params=params, return_type="streamUpdate", parse_response=False + ) + + def delete(self, id: str) -> bool: + """Delete a stream given its id + + Arguments: + id {str} -- the id of the stream to delete + + Returns: + bool -- whether the deletion was successful + """ + query = gql( + """ + mutation StreamDelete($id: String!) { + streamDelete(id: $id) + } + """ + ) + + params = {"id": id} + + return self.make_request( + query=query, params=params, return_type="streamDelete", parse_response=False + ) + + def search( + self, + search_query: str, + limit: int = 25, + branch_limit: int = 10, + commit_limit: int = 10, + ): + """Search for streams by name, description, or id + + Arguments: + search_query {str} -- a string to search for + limit {int} -- the maximum number of results to return + branch_limit {int} -- the maximum number of branches to return + commit_limit {int} -- the maximum number of commits to return + + Returns: + List[Stream] -- a list of Streams that match the search query + """ + query = gql( + """ + query StreamSearch( + $search_query: String!, + $limit: Int!, + $branch_limit:Int!, + $commit_limit:Int! + ) { + streams(query: $search_query, limit: $limit) { + items { + id + name + role + description + isPublic + createdAt + updatedAt + collaborators { + id + name + role + avatar + } + branches(limit: $branch_limit) { + totalCount + cursor + items { + id + name + description + commits(limit: $commit_limit) { + totalCount + cursor + items { + id + referencedObject + message + authorName + authorId + createdAt + } + } + } + } + } + } + } + """ + ) + + params = { + "search_query": search_query, + "limit": limit, + "branch_limit": branch_limit, + "commit_limit": commit_limit, + } + + return self.make_request( + query=query, params=params, return_type=["streams", "items"] + ) + + def favorite(self, stream_id: str, favorited: bool = True): + """Favorite or unfavorite the given stream. + + Arguments: + stream_id {str} -- the id of the stream to favorite / unfavorite + favorited {bool} + -- whether to favorite (True) or unfavorite (False) the stream + + Returns: + Stream -- the stream with its `id`, `name`, and `favoritedDate` + """ + query = gql( + """ + mutation StreamFavorite($stream_id: String!, $favorited: Boolean!) 
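+            # Example variables (illustrative): {"stream_id": "3073b96e86", "favorited": true}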
{ + streamFavorite(streamId: $stream_id, favorited: $favorited) { + id + name + favoritedDate + favoritesCount + } + } + """ + ) + + params = { + "stream_id": stream_id, + "favorited": favorited, + } + + return self.make_request( + query=query, params=params, return_type=["streamFavorite"] + ) + + def get_all_pending_invites( + self, stream_id: str + ) -> List[PendingStreamCollaborator]: + """Get all of the pending invites on a stream. + You must be a `stream:owner` to query this. + + Requires Speckle Server version >= 2.6.4 + + Arguments: + stream_id {str} -- the stream id from which to get the pending invites + + Returns: + List[PendingStreamCollaborator] + -- a list of pending invites for the specified stream + """ + self._check_invites_supported() + + query = gql( + """ + query StreamInvites($streamId: String!) { + stream(id: $streamId){ + pendingCollaborators { + id + token + inviteId + streamId + streamName + title + role + invitedBy{ + id + name + company + avatar + } + user { + id + name + company + avatar + } + } + } + } + """ + ) + params = {"streamId": stream_id} + + return self.make_request( + query=query, + params=params, + return_type=["stream", "pendingCollaborators"], + schema=PendingStreamCollaborator, + ) + + def invite( + self, + stream_id: str, + email: Optional[str] = None, + user_id: Optional[str] = None, + role: str = "stream:contributor", # should default be reviewer? + message: Optional[str] = None, + ): + """Invite someone to a stream using either their email or user id + + Requires Speckle Server version >= 2.6.4 + + Arguments: + stream_id {str} -- the id of the stream to invite the user to + email {str} -- the email of the user to invite (use this OR `user_id`) + user_id {str} -- the id of the user to invite (use this OR `email`) + role {str} + -- the role to assign to the user (defaults to `stream:contributor`) + message {str} + -- a message to send along with this invite to the specified user + + Returns: + bool -- True if the operation was successful + """ + self._check_invites_supported() + + if email is None and user_id is None: + raise SpeckleException( + "You must provide either an email or a user id to use the" + " `stream.invite` method" + ) + + query = gql( + """ + mutation StreamInviteCreate($input: StreamInviteCreateInput!) { + streamInviteCreate(input: $input) + } + """ + ) + + params = { + "email": email, + "userId": user_id, + "streamId": stream_id, + "message": message, + "role": role, + } + params = {"input": {k: v for k, v in params.items() if v is not None}} + + return self.make_request( + query=query, + params=params, + return_type="streamInviteCreate", + parse_response=False, + ) + + def invite_batch( + self, + stream_id: str, + emails: Optional[List[str]] = None, + user_ids: Optional[List[None]] = None, + message: Optional[str] = None, + ) -> bool: + """Invite a batch of users to a specified stream. 
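+        Example (editor's illustrative sketch; assumes an authenticated
+        `SpeckleClient` instance named `client` and an existing stream id):
+
+            client.stream.invite_batch(
+                stream_id="3073b96e86",
+                emails=["alice@example.com", "bob@example.com"],
+                message="Join our project stream!",
+            )
+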
+ + Requires Speckle Server version >= 2.6.4 + + Arguments: + stream_id {str} -- the id of the stream to invite the user to + emails {List[str]} + -- the email of the user to invite (use this and/or `user_ids`) + user_id {List[str]} + -- the id of the user to invite (use this and/or `emails`) + message {str} + -- a message to send along with this invite to the specified user + + Returns: + bool -- True if the operation was successful + """ + self._check_invites_supported() + if emails is None and user_ids is None: + raise SpeckleException( + "You must provide either an email or a user id to use the" + " `stream.invite` method" + ) + + query = gql( + """ + mutation StreamInviteBatchCreate($input: [StreamInviteCreateInput!]!) { + streamInviteBatchCreate(input: $input) + } + """ + ) + + email_invites = [ + {"streamId": stream_id, "message": message, "email": email} + for email in (emails if emails is not None else []) + if email is not None + ] + + user_invites = [ + {"streamId": stream_id, "message": message, "userId": user_id} + for user_id in (user_ids if user_ids is not None else []) + if user_id is not None + ] + + + params = {"input": [*email_invites, *user_invites]} + + return self.make_request( + query=query, + params=params, + return_type="streamInviteBatchCreate", + parse_response=False, + ) + + def invite_cancel(self, stream_id: str, invite_id: str) -> bool: + """Cancel an existing stream invite + + Requires Speckle Server version >= 2.6.4 + + Arguments: + stream_id {str} -- the id of the stream invite + invite_id {str} -- the id of the invite to use + + Returns: + bool -- true if the operation was successful + """ + self._check_invites_supported() + + query = gql( + """ + mutation StreamInviteCancel($streamId: String!, $inviteId: String!) { + streamInviteCancel(streamId: $streamId, inviteId: $inviteId) + } + """ + ) + + params = {"streamId": stream_id, "inviteId": invite_id} + + return self.make_request( + query=query, + params=params, + return_type="streamInviteCancel", + parse_response=False, + ) + + def invite_use(self, stream_id: str, token: str, accept: bool = True) -> bool: + """Accept or decline a stream invite + + Requires Speckle Server version >= 2.6.4 + + Arguments: + stream_id {str} + -- the id of the stream for which the user has a pending invite + token {str} -- the token of the invite to use + accept {bool} -- whether or not to accept the invite (defaults to True) + + Returns: + bool -- true if the operation was successful + """ + self._check_invites_supported() + + query = gql( + """ + mutation StreamInviteUse( + $accept: Boolean!, + $streamId: String!, + $token: String! 
+ ) { + streamInviteUse(accept: $accept, streamId: $streamId, token: $token) + } + """ + ) + + params = {"streamId": stream_id, "token": token, "accept": accept} + + return self.make_request( + query=query, + params=params, + return_type="streamInviteUse", + parse_response=False, + ) + + def update_permission(self, stream_id: str, user_id: str, role: str): + """Updates permissions for a user on a given stream + + Valid for Speckle Server >=2.6.4 + + Arguments: + stream_id {str} -- the id of the stream to grant permissions to + user_id {str} -- the id of the user to grant permissions for + role {str} -- the role to grant the user + + Returns: + bool -- True if the operation was successful + """ + if self.server_version and ( + self.server_version != ("dev",) and self.server_version < (2, 6, 4) + ): + raise UnsupportedException( + "Server mutation `update_permission` is only supported as of Speckle" + " Server v2.6.4. Please update your Speckle Server to use this method" + " or use the `grant_permission` method instead." + ) + query = gql( + """ + mutation StreamUpdatePermission( + $permission_params: StreamUpdatePermissionInput! + ) { + streamUpdatePermission(permissionParams: $permission_params) + } + """ + ) + + params = { + "permission_params": { + "streamId": stream_id, + "userId": user_id, + "role": role, + } + } + + return self.make_request( + query=query, + params=params, + return_type="streamUpdatePermission", + parse_response=False, + ) + + def revoke_permission(self, stream_id: str, user_id: str): + """Revoke permissions from a user on a given stream + + Arguments: + stream_id {str} -- the id of the stream to revoke permissions from + user_id {str} -- the id of the user to revoke permissions from + + Returns: + bool -- True if the operation was successful + """ + query = gql( + """ + mutation StreamRevokePermission( + $permission_params: StreamRevokePermissionInput! + ) { + streamRevokePermission(permissionParams: $permission_params) + } + """ + ) + + params = {"permission_params": {"streamId": stream_id, "userId": user_id}} + + return self.make_request( + query=query, + params=params, + return_type="streamRevokePermission", + parse_response=False, + ) + + def activity( + self, + stream_id: str, + action_type: Optional[str] = None, + limit: int = 20, + before: Optional[datetime] = None, + after: Optional[datetime] = None, + cursor: Optional[datetime] = None, + ): + """ + Get the activity from a given stream in an Activity collection. + Step into the activity `items` for the list of activity. 
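+
+        A hedged example, assuming an authenticated `SpeckleClient` named
+        `client` (the stream id is a placeholder):
+
+        ```py
+        from datetime import datetime, timedelta, timezone
+
+        # commits created on the stream during the last week
+        last_week = datetime.now(timezone.utc) - timedelta(days=7)
+        activity = client.stream.activity(
+            stream_id="3073b96e86",
+            action_type="commit_create",
+            after=last_week,
+            limit=10,
+        )
+        for item in activity.items:
+            print(item.actionType, item.userId, item.time)
+        ```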
+ + Note: all timestamps arguments should be `datetime` of any tz + as they will be converted to UTC ISO format strings + + stream_id {str} -- the id of the stream to get activity from + action_type {str} + -- filter results to a single action type + (eg: `commit_create` or `commit_receive`) + limit {int} -- max number of Activity items to return + before {datetime} + -- latest cutoff for activity (ie: return all activity _before_ this time) + after {datetime} + -- oldest cutoff for activity (ie: return all activity _after_ this time) + cursor {datetime} -- timestamp cursor for pagination + """ + query = gql( + """ + query StreamActivity( + $stream_id: String!, + $action_type: String, + $before:DateTime, + $after: DateTime, + $cursor: DateTime, + $limit: Int + ){ + stream(id: $stream_id) { + activity( + actionType: $action_type, + before: $before, + after: $after, + cursor: $cursor, + limit: $limit + ) { + totalCount + cursor + items { + actionType + info + userId + streamId + resourceId + resourceType + message + time + } + } + } + } + """ + ) + try: + params = { + "stream_id": stream_id, + "limit": limit, + "action_type": action_type, + "before": before.astimezone(timezone.utc).isoformat() + if before + else before, + "after": after.astimezone(timezone.utc).isoformat() if after else after, + "cursor": cursor.astimezone(timezone.utc).isoformat() + if cursor + else cursor, + } + except AttributeError as e: + raise SpeckleException( + "Could not get stream activity - `before`, `after`, and `cursor` must" + " be in `datetime` format if provided", + ValueError(), + ) from e + + return self.make_request( + query=query, + params=params, + return_type=["stream", "activity"], + schema=ActivityCollection, + ) diff --git a/src/specklepy/core/api/resources/subscriptions.py b/src/specklepy/core/api/resources/subscriptions.py new file mode 100644 index 00000000..445ef4e8 --- /dev/null +++ b/src/specklepy/core/api/resources/subscriptions.py @@ -0,0 +1,138 @@ +from functools import wraps +from typing import Callable, Dict, List, Optional, Union + +from gql import gql +from graphql import DocumentNode + +from specklepy.core.api.resource import ResourceBase +from specklepy.core.api.resources.stream import Stream +from specklepy.logging.exceptions import SpeckleException + +NAME = "subscribe" + + +def check_wsclient(function): + @wraps(function) + async def check_wsclient_wrapper(self, *args, **kwargs): + if self.client is None: + raise SpeckleException( + "You must authenticate before you can subscribe to events" + ) + else: + return await function(self, *args, **kwargs) + + return check_wsclient_wrapper + + +class Resource(ResourceBase): + """API Access class for subscriptions""" + + def __init__(self, account, basepath, client) -> None: + super().__init__( + account=account, + basepath=basepath, + client=client, + name=NAME, + ) + + @check_wsclient + async def stream_added(self, callback: Optional[Callable] = None): + """Subscribes to new stream added event for your profile. + Use this to display an up-to-date list of streams. 
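+
+        A rough sketch, assuming the client exposes this resource as
+        `client.subscribe` (host and token are placeholders):
+
+        ```py
+        import asyncio
+
+        from specklepy.api.client import SpeckleClient
+
+        client = SpeckleClient(host="speckle.xyz")
+        client.authenticate_with_token("your-token-here")
+
+        def on_stream_added(stream):
+            print("new stream:", stream.name)
+
+        # blocks and keeps listening until the coroutine is cancelled
+        asyncio.run(client.subscribe.stream_added(callback=on_stream_added))
+        ```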
+ + Arguments: + callback {Callable[Stream]} -- a function that takes the updated stream + as an argument and executes each time a stream is added + + Returns: + Stream -- the update stream + """ + query = gql( + """ + subscription { userStreamAdded } + """ + ) + return await self.subscribe( + query=query, callback=callback, return_type="userStreamAdded", schema=Stream + ) + + @check_wsclient + async def stream_updated(self, id: str, callback: Optional[Callable] = None): + """ + Subscribes to stream updated event. + Use this in clients/components that pertain only to this stream. + + Arguments: + id {str} -- the stream id of the stream to subscribe to + callback {Callable[Stream]} + -- a function that takes the updated stream + as an argument and executes each time the stream is updated + + Returns: + Stream -- the update stream + """ + query = gql( + """ + subscription Update($id: String!) { streamUpdated(streamId: $id) } + """ + ) + params = {"id": id} + + return await self.subscribe( + query=query, + params=params, + callback=callback, + return_type="streamUpdated", + schema=Stream, + ) + + @check_wsclient + async def stream_removed(self, callback: Optional[Callable] = None): + """Subscribes to stream removed event for your profile. + Use this to display an up-to-date list of streams for your profile. + NOTE: If someone revokes your permissions on a stream, + this subscription will be triggered with an extra value of revokedBy + in the payload. + + Arguments: + callback {Callable[Dict]} + -- a function that takes the returned dict as an argument + and executes each time a stream is removed + + Returns: + dict -- dict containing 'id' of stream removed and optionally 'revokedBy' + """ + query = gql( + """ + subscription { userStreamRemoved } + """ + ) + + return await self.subscribe( + query=query, + callback=callback, + return_type="userStreamRemoved", + parse_response=False, + ) + + @check_wsclient + async def subscribe( + self, + query: DocumentNode, + params: Optional[Dict] = None, + callback: Optional[Callable] = None, + return_type: Optional[Union[str, List]] = None, + schema=None, + parse_response: bool = True, + ): + # if self.client.transport.websocket is None: + # TODO: add multiple subs to the same ws connection + async with self.client as session: + async for res in session.subscribe(query, variable_values=params): + res = self._step_into_response(response=res, return_type=return_type) + if parse_response: + res = self._parse_response(response=res, schema=schema) + if callback is not None: + callback(res) + else: + return res diff --git a/src/specklepy/core/api/resources/user.py b/src/specklepy/core/api/resources/user.py new file mode 100644 index 00000000..c55b1e0a --- /dev/null +++ b/src/specklepy/core/api/resources/user.py @@ -0,0 +1,322 @@ +from datetime import datetime, timezone +from typing import List, Optional, Union + +from deprecated import deprecated +from gql import gql + +from specklepy.core.api.models import ActivityCollection, PendingStreamCollaborator, User +from specklepy.core.api.resource import ResourceBase + +from specklepy.logging.exceptions import SpeckleException + +NAME = "user" + +DEPRECATION_VERSION = "2.9.0" +DEPRECATION_TEXT = ( + "The user resource is deprecated, please use the active_user or other_user" + " resources" +) + + +class Resource(ResourceBase): + """API Access class for users""" + + def __init__(self, account, basepath, client, server_version) -> None: + super().__init__( + account=account, + basepath=basepath, + client=client, + 
name=NAME, + server_version=server_version, + ) + self.schema = User + + @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) + def get(self, id: Optional[str] = None) -> User: + """ + Gets the profile of a user. + If no id argument is provided, will return the current authenticated + user's profile (as extracted from the authorization header). + + Arguments: + id {str} -- the user id + + Returns: + User -- the retrieved user + """ + query = gql( + """ + query User($id: String) { + user(id: $id) { + id + email + name + bio + company + avatar + verified + profiles + role + } + } + """ + ) + + params = {"id": id} + + return self.make_request(query=query, params=params, return_type="user") + + @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) + def search( + self, search_query: str, limit: int = 25 + ) -> Union[List[User], SpeckleException]: + """ + Searches for user by name or email. + The search query must be at least 3 characters long + + Arguments: + search_query {str} -- a string to search for + limit {int} -- the maximum number of results to return + Returns: + List[User] -- a list of User objects that match the search query + """ + if len(search_query) < 3: + return SpeckleException( + message="User search query must be at least 3 characters" + ) + + query = gql( + """ + query UserSearch($search_query: String!, $limit: Int!) { + userSearch(query: $search_query, limit: $limit) { + items { + id + name + bio + company + avatar + verified + } + } + } + """ + ) + params = {"search_query": search_query, "limit": limit} + + return self.make_request( + query=query, params=params, return_type=["userSearch", "items"] + ) + + @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) + def update( + self, + name: Optional[str] = None, + company: Optional[str] = None, + bio: Optional[str] = None, + avatar: Optional[str] = None, + ): + """Updates your user profile. All arguments are optional. + + Arguments: + name {str} -- your name + company {str} -- the company you may or may not work for + bio {str} -- tell us about yourself + avatar {str} -- a nice photo of yourself + + Returns: + bool -- True if your profile was updated successfully + """ + query = gql( + """ + mutation UserUpdate($user: UserUpdateInput!) { + userUpdate(user: $user) + } + """ + ) + params = {"name": name, "company": company, "bio": bio, "avatar": avatar} + + params = {"user": {k: v for k, v in params.items() if v is not None}} + + if not params["user"]: + return SpeckleException( + message=( + "You must provide at least one field to update your user profile" + ) + ) + + return self.make_request( + query=query, params=params, return_type="userUpdate", parse_response=False + ) + + @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) + def activity( + self, + user_id: Optional[str] = None, + limit: int = 20, + action_type: Optional[str] = None, + before: Optional[datetime] = None, + after: Optional[datetime] = None, + cursor: Optional[datetime] = None, + ): + """ + Get the activity from a given stream in an Activity collection. + Step into the activity `items` for the list of activity. + If no id argument is provided, will return the current authenticated + user's activity (as extracted from the authorization header). 
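+
+        A hedged example, assuming an authenticated `SpeckleClient` named
+        `client`; prefer the non-deprecated `active_user` resource for your
+        own profile (the user id below is a placeholder):
+
+        ```py
+        # your own recent activity via the replacement resource
+        my_activity = client.active_user.activity(limit=20)
+        print(my_activity.totalCount)
+
+        # another user's activity via this (deprecated) resource
+        their_activity = client.user.activity(user_id="abcdef1234", limit=20)
+        ```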
+ + Note: all timestamps arguments should be `datetime` of any tz as + they will be converted to UTC ISO format strings + + user_id {str} -- the id of the user to get the activity from + action_type {str} -- filter results to a single action type + (eg: `commit_create` or `commit_receive`) + limit {int} -- max number of Activity items to return + before {datetime} + -- latest cutoff for activity (ie: return all activity _before_ this time) + after {datetime} + -- oldest cutoff for activity (ie: return all activity _after_ this time) + cursor {datetime} -- timestamp cursor for pagination + """ + + query = gql( + """ + query UserActivity( + $user_id: String, + $action_type: String, + $before:DateTime, + $after: DateTime, + $cursor: DateTime, + $limit: Int + ){ + user(id: $user_id) { + activity( + actionType: $action_type, + before: $before, + after: $after, + cursor: $cursor, + limit: $limit + ) { + totalCount + cursor + items { + actionType + info + userId + streamId + resourceId + resourceType + message + time + } + } + } + } + """ + ) + + params = { + "user_id": user_id, + "limit": limit, + "action_type": action_type, + "before": before.astimezone(timezone.utc).isoformat() if before else before, + "after": after.astimezone(timezone.utc).isoformat() if after else after, + "cursor": cursor.astimezone(timezone.utc).isoformat() if cursor else cursor, + } + + return self.make_request( + query=query, + params=params, + return_type=["user", "activity"], + schema=ActivityCollection, + ) + + @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) + def get_all_pending_invites(self) -> List[PendingStreamCollaborator]: + """Get all of the active user's pending stream invites + + Requires Speckle Server version >= 2.6.4 + + Returns: + List[PendingStreamCollaborator] + -- a list of pending invites for the current user + """ + self._check_invites_supported() + + query = gql( + """ + query StreamInvites { + streamInvites{ + id + token + inviteId + streamId + streamName + title + role + invitedBy { + id + name + company + avatar + } + } + } + """ + ) + + return self.make_request( + query=query, + return_type="streamInvites", + schema=PendingStreamCollaborator, + ) + + @deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT) + def get_pending_invite( + self, stream_id: str, token: Optional[str] = None + ) -> Optional[PendingStreamCollaborator]: + """Get a particular pending invite for the active user on a given stream. + If no invite_id is provided, any valid invite will be returned. 
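+
+        A short sketch, assuming an authenticated `SpeckleClient` named
+        `client` (the stream id is a placeholder); the non-deprecated
+        `active_user` resource exposes the same method:
+
+        ```py
+        invite = client.active_user.get_pending_invite(stream_id="3073b96e86")
+        if invite is not None:
+            # accept the invite using its token
+            client.stream.invite_use(stream_id=invite.streamId, token=invite.token)
+        ```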
+ + Requires Speckle Server version >= 2.6.4 + + Arguments: + stream_id {str} -- the id of the stream to look for invites on + token {str} -- the token of the invite to look for (optional) + + Returns: + PendingStreamCollaborator + -- the invite for the given stream (or None if it isn't found) + """ + self._check_invites_supported() + + query = gql( + """ + query StreamInvite($streamId: String!, $token: String) { + streamInvite(streamId: $streamId, token: $token) { + id + token + streamId + streamName + title + role + invitedBy { + id + name + company + avatar + } + } + } + """ + ) + + params = {"streamId": stream_id} + if token: + params["token"] = token + + return self.make_request( + query=query, + params=params, + return_type="streamInvite", + schema=PendingStreamCollaborator, + ) diff --git a/src/specklepy/core/api/wrapper.py b/src/specklepy/core/api/wrapper.py new file mode 100644 index 00000000..6e22b973 --- /dev/null +++ b/src/specklepy/core/api/wrapper.py @@ -0,0 +1,186 @@ +from urllib.parse import unquote, urlparse +from warnings import warn + +from specklepy.core.api.client import SpeckleClient +from specklepy.core.api.credentials import ( + Account, + get_account_from_token, + get_local_accounts, +) +from specklepy.logging.exceptions import SpeckleException, SpeckleWarning +from specklepy.transports.server.server import ServerTransport + + +class StreamWrapper: + """ + The `StreamWrapper` gives you some handy helpers to deal with urls and + get authenticated clients and transports. + + Construct a `StreamWrapper` with a stream, branch, commit, or object URL. + The corresponding ids will be stored + in the wrapper. If you have local accounts on the machine, + you can use the `get_account` and `get_client` methods + to get a local account for the server. You can also pass a token into `get_client` + if you don't have a corresponding + local account for the server. + + ```py + from specklepy.api.wrapper import StreamWrapper + + # provide any stream, branch, commit, object, or globals url + wrapper = StreamWrapper("https://speckle.xyz/streams/3073b96e86/commits/604bea8cc6") + + # get an authenticated SpeckleClient if you have a local account for the server + client = wrapper.get_client() + + # get an authenticated ServerTransport if you have a local account for the server + transport = wrapper.get_transport() + ``` + """ + + stream_url: str = None + use_ssl: bool = True + host: str = None + stream_id: str = None + commit_id: str = None + object_id: str = None + branch_name: str = None + _client: SpeckleClient = None + _account: Account = None + + def __repr__(self): + return ( + f"StreamWrapper( server: {self.host}, stream_id: {self.stream_id}, type:" + f" {self.type} )" + ) + + def __str__(self) -> str: + return self.__repr__() + + @property + def type(self) -> str: + if self.object_id: + return "object" + elif self.commit_id: + return "commit" + elif self.branch_name: + return "branch" + else: + return "stream" if self.stream_id else "invalid" + + def __init__(self, url: str) -> None: + self.stream_url = url + parsed = urlparse(url) + self.host = parsed.netloc + self.use_ssl = parsed.scheme == "https" + segments = parsed.path.strip("/").split("/", 3) + + if not segments or len(segments) < 2: + raise SpeckleException( + f"Cannot parse {url} into a stream wrapper class - invalid URL" + " provided." 
+ ) + + while segments: + segment = segments.pop(0) + if segments and segment.lower() == "streams": + self.stream_id = segments.pop(0) + elif segments and segment.lower() == "commits": + self.commit_id = segments.pop(0) + elif segments and segment.lower() == "branches": + self.branch_name = unquote(segments.pop(0)) + elif segments and segment.lower() == "objects": + self.object_id = segments.pop(0) + elif segment.lower() == "globals": + self.branch_name = "globals" + if segments: + self.commit_id = segments.pop(0) + else: + raise SpeckleException( + f"Cannot parse {url} into a stream wrapper class - invalid URL" + " provided." + ) + + if not self.stream_id: + raise SpeckleException( + f"Cannot parse {url} into a stream wrapper class - no stream id found." + ) + + @property + def server_url(self): + return f"{'https' if self.use_ssl else 'http'}://{self.host}" + + def get_account(self, token: str = None) -> Account: + """ + Gets an account object for this server from the local accounts db + (added via Speckle Manager or a json file) + """ + if self._account and self._account.token: + return self._account + + self._account = next( + ( + a + for a in get_local_accounts() + if self.host == urlparse(a.serverInfo.url).netloc + ), + None, + ) + + if not self._account: + self._account = get_account_from_token(token, self.server_url) + + if self._client: + self._client.authenticate_with_account(self._account) + + return self._account + + def get_client(self, token: str = None) -> SpeckleClient: + """ + Gets an authenticated client for this server. + You may provide a token if there aren't any local accounts on this + machine. If no account is found and no token is provided, + an unauthenticated client is returned. + + Arguments: + token {str} + -- optional token if no local account is available (defaults to None) + + Returns: + SpeckleClient + -- authenticated with a corresponding local account or the provided token + """ + if self._client and token is None: + return self._client + + if not self._account or not self._account.token: + self.get_account(token) + + if not self._client: + self._client = SpeckleClient(host=self.host, use_ssl=self.use_ssl) + + if self._account.token is None and token is None: + warn(f"No local account found for server {self.host}", SpeckleWarning) + return self._client + + if self._account.token: + self._client.authenticate_with_account(self._account) + else: + self._client.authenticate_with_token(token) + + return self._client + + def get_transport(self, token: str = None) -> ServerTransport: + """ + Gets a server transport for this stream using an authenticated client. + If there is no local account for this + server and the client was not authenticated with a token, + this will throw an exception. + + Returns: + ServerTransport -- constructed for this stream + with a pre-authenticated client + """ + if not self._account or not self._account.token: + self.get_account(token) + return ServerTransport(self.stream_id, account=self._account) diff --git a/src/specklepy/core/helpers/speckle_path_provider.py b/src/specklepy/core/helpers/speckle_path_provider.py index c03b5ce6..9313836c 100644 --- a/src/specklepy/core/helpers/speckle_path_provider.py +++ b/src/specklepy/core/helpers/speckle_path_provider.py @@ -106,6 +106,18 @@ def user_speckle_folder_path() -> Path: return _ensure_folder_exists(user_application_data_path(), _application_name) +def user_speckle_connector_installation_path(host_application: str) -> Path: + """ + Gets a connector specific installation folder. 
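+
+    For example (the "qgis" host application name is only an illustration),
+    this resolves to something like
+    <user application data>/Speckle/connector_installations/qgis:
+
+    ```py
+    from specklepy.core.helpers import speckle_path_provider
+
+    install_path = speckle_path_provider.user_speckle_connector_installation_path("qgis")
+    print(install_path)  # a pathlib.Path; the folder is created if missing
+    ```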
+ + In this folder we can put our connector installation and all python packages. + """ + return _ensure_folder_exists( + _ensure_folder_exists(user_speckle_folder_path(), "connector_installations"), + host_application, + ) + + def accounts_folder_path() -> Path: """Get the folder where the Speckle accounts data should be stored.""" return _ensure_folder_exists(user_speckle_folder_path(), _accounts_folder_name) diff --git a/src/specklepy/logging/metrics.py b/src/specklepy/logging/metrics.py index e06e4d1e..80fbc6b5 100644 --- a/src/specklepy/logging/metrics.py +++ b/src/specklepy/logging/metrics.py @@ -23,23 +23,25 @@ METRICS_TRACKER = None # actions +SDK = "SDK Action" +CONNECTOR = "Connector Action" RECEIVE = "Receive" SEND = "Send" -STREAM = "Stream Action" -PERMISSION = "Permission Action" -INVITE = "Invite Action" -COMMIT = "Commit Action" + +# not in use since 2.15 +ACCOUNTS = "Get Local Accounts" BRANCH = "Branch Action" -USER = "User Action" +CLIENT = "Speckle Client" +COMMIT = "Commit Action" +DESERIALIZE = "serialization/deserialize" +INVITE = "Invite Action" OTHER_USER = "Other User Action" +PERMISSION = "Permission Action" +SERIALIZE = "serialization/serialize" SERVER = "Server Action" -CLIENT = "Speckle Client" +STREAM = "Stream Action" STREAM_WRAPPER = "Stream Wrapper" - -ACCOUNTS = "Get Local Accounts" - -SERIALIZE = "serialization/serialize" -DESERIALIZE = "serialization/deserialize" +USER = "User Action" def disable(): @@ -96,7 +98,7 @@ def initialise_tracker(account=None): if account and account.userInfo.email: METRICS_TRACKER.set_last_user(account.userInfo.email) if account and account.serverInfo.url: - METRICS_TRACKER.set_last_server(account.userInfo.email) + METRICS_TRACKER.set_last_server(account.serverInfo.url) class Singleton(type): @@ -139,7 +141,9 @@ def set_last_server(self, server: str): self.last_server = self.hash(server) def hash(self, value: str): - return hashlib.md5(value.lower().encode("utf-8")).hexdigest().upper() + inputList = value.lower().split("://") + input = inputList[len(inputList)-1].split("/")[0].split('?')[0] + return hashlib.md5(input.encode("utf-8")).hexdigest().upper() def _send_tracking_requests(self): session = requests.Session() diff --git a/src/specklepy/objects/GIS/CRS.py b/src/specklepy/objects/GIS/CRS.py new file mode 100644 index 00000000..746f4429 --- /dev/null +++ b/src/specklepy/objects/GIS/CRS.py @@ -0,0 +1,16 @@ +from typing import Optional +from specklepy.objects import Base + + +class CRS(Base, speckle_type="Objects.GIS.CRS"): + """A Coordinate Reference System stored in wkt format""" + + name: Optional[str] = None + authority_id: Optional[str] = None + wkt: Optional[str] = None + units_native: Optional[str] = None + offset_x: Optional[float] = None + offset_y: Optional[float] = None + rotation: Optional[float] = None + + diff --git a/src/specklepy/objects/GIS/__init__.py b/src/specklepy/objects/GIS/__init__.py new file mode 100644 index 00000000..566eab99 --- /dev/null +++ b/src/specklepy/objects/GIS/__init__.py @@ -0,0 +1,22 @@ +"""Builtin Speckle object kit.""" + +from specklepy.objects.GIS.layers import ( + VectorLayer, + RasterLayer, +) + +from specklepy.objects.GIS.geometry import ( + GisPolygonGeometry, + GisPolygonElement, + GisLineElement, + GisPointElement, + GisRasterElement, +) + +from specklepy.objects.GIS.CRS import ( + CRS, +) + +__all__ = ["VectorLayer", "RasterLayer", + "GisPolygonGeometry", "GisPolygonElement", "GisLineElement", "GisPointElement", "GisRasterElement", + "CRS"] diff --git 
a/src/specklepy/objects/GIS/geometry.py b/src/specklepy/objects/GIS/geometry.py new file mode 100644 index 00000000..b692fa5e --- /dev/null +++ b/src/specklepy/objects/GIS/geometry.py @@ -0,0 +1,53 @@ + +from typing import Optional, Union, List +from specklepy.objects.geometry import Point, Line, Polyline, Circle, Arc, Polycurve, Mesh +from specklepy.objects import Base +from deprecated import deprecated + +class GisPolygonGeometry(Base, speckle_type="Objects.GIS.PolygonGeometry", detachable={"displayValue"}): + """GIS Polygon Geometry""" + + boundary: Optional[Union[Polyline, Arc, Line, Circle, Polycurve]] = None + voids: Optional[List[Union[Polyline, Arc, Line, Circle, Polycurve]] ] = None + displayValue: Optional[List[Mesh]] = None + +class GisPolygonElement(Base, speckle_type="Objects.GIS.PolygonElement"): + """GIS Polygon element""" + + geometry: Optional[List[GisPolygonGeometry]] = None + attributes: Optional[Base] = None + +class GisLineElement(Base, speckle_type="Objects.GIS.LineElement"): + """GIS Polyline element""" + + geometry: Optional[List[Union[Polyline, Arc, Line, Circle, Polycurve]]] = None, + attributes: Optional[Base] = None, + +class GisPointElement(Base, speckle_type="Objects.GIS.PointElement"): + """GIS Point element""" + + geometry: Optional[List[Point]] = None, + attributes: Optional[Base] = None, + +class GisRasterElement(Base, speckle_type="Objects.GIS.RasterElement", detachable={"displayValue"}): + """GIS Raster element""" + + band_count: Optional[int] = None + band_names: Optional[List[str]] = None + x_origin: Optional[float] = None + y_origin: Optional[float] = None + x_size: Optional[int] = None + y_size: Optional[int] = None + x_resolution: Optional[float] = None + y_resolution: Optional[float] = None + noDataValue: Optional[List[float]] = None + displayValue: Optional[List[Mesh]] = None + +class GisTopography(GisRasterElement, speckle_type="Objects.GIS.GisTopography", detachable={"displayValue"}): + """GIS Raster element with 3d Topography representation""" + +class GisNonGeometryElement(Base, speckle_type="Objects.GIS.NonGeometryElement"): + """GIS Table feature""" + + attributes: Optional[Base] = None + diff --git a/src/specklepy/objects/GIS/layers.py b/src/specklepy/objects/GIS/layers.py new file mode 100644 index 00000000..c5411b26 --- /dev/null +++ b/src/specklepy/objects/GIS/layers.py @@ -0,0 +1,138 @@ +from typing import Any, Dict, List, Union, Optional +from specklepy.objects.base import Base +from specklepy.objects.other import Collection + +from specklepy.objects.GIS.CRS import CRS +from deprecated import deprecated + +@deprecated(version="2.15", reason="Use VectorLayer or RasterLayer instead") +class Layer(Base, detachable={"features"}): + """A GIS Layer""" + def __init__( + self, + name:str=None, + crs:CRS=None, + units: str = "m", + features: Optional[List[Base]] = None, + layerType: str = "None", + geomType: str = "None", + renderer: Optional[dict[str, Any]] = None, + **kwargs + ) -> None: + super().__init__(**kwargs) + self.name = name + self.crs = crs + self.units = units + self.type = layerType + self.features = features or [] + self.geomType = geomType + self.renderer = renderer or {} + +@deprecated(version="2.16", reason="Use VectorLayer or RasterLayer instead") +class VectorLayer( + Collection, + detachable={"elements"}, + speckle_type="VectorLayer", + serialize_ignore={"features"}): + + """GIS Vector Layer""" + name: Optional[str]=None + crs: Optional[Union[CRS, Base]]=None + units: Optional[str] = None + elements: Optional[List[Base]] = 
None + attributes: Optional[Base] = None + geomType: Optional[str] = "None" + renderer: Optional[Dict[str, Any]] = None + collectionType = "VectorLayer" + + @property + @deprecated(version="2.14", reason="Use elements") + def features(self) -> Optional[List[Base]]: + return self.elements + + @features.setter + def features(self, value: Optional[List[Base]]) -> None: + self.elements = value + +@deprecated(version="2.16", reason="Use VectorLayer or RasterLayer instead") +class RasterLayer( + Collection, + detachable={"elements"}, + speckle_type="RasterLayer", + serialize_ignore={"features"}): + + """GIS Raster Layer""" + + name: Optional[str] = None + crs: Optional[Union[CRS, Base]]=None + units: Optional[str] = None + rasterCrs: Optional[Union[CRS, Base]]=None + elements: Optional[List[Base]] = None + geomType: Optional[str] = "None" + renderer: Optional[Dict[str, Any]] = None + collectionType = "RasterLayer" + + + @property + @deprecated(version="2.14", reason="Use elements") + def features(self) -> Optional[List[Base]]: + return self.elements + + @features.setter + def features(self, value: Optional[List[Base]]) -> None: + self.elements = value + + +class VectorLayer( + Collection, + detachable={"elements"}, + speckle_type="Objects.GIS.VectorLayer", + serialize_ignore={"features"}): + + """GIS Vector Layer""" + + name: Optional[str]=None + crs: Optional[Union[CRS, Base]]=None + units: Optional[str] = None + elements: Optional[List[Base]] = None + attributes: Optional[Base] = None + geomType: Optional[str] = "None" + renderer: Optional[Dict[str, Any]] = None + collectionType = "VectorLayer" + + @property + @deprecated(version="2.14", reason="Use elements") + def features(self) -> Optional[List[Base]]: + return self.elements + + @features.setter + def features(self, value: Optional[List[Base]]) -> None: + self.elements = value + +class RasterLayer( + Collection, + detachable={"elements"}, + speckle_type="Objects.GIS.RasterLayer", + serialize_ignore={"features"}): + + """GIS Raster Layer""" + + name: Optional[str] = None + crs: Optional[Union[CRS, Base]]=None + units: Optional[str] = None + rasterCrs: Optional[Union[CRS, Base]]=None + elements: Optional[List[Base]] = None + geomType: Optional[str] = "None" + renderer: Optional[Dict[str, Any]] = None + collectionType = "RasterLayer" + + + @property + @deprecated(version="2.14", reason="Use elements") + def features(self) -> Optional[List[Base]]: + return self.elements + + @features.setter + def features(self, value: Optional[List[Base]]) -> None: + self.elements = value + diff --git a/src/specklepy/objects/base.py b/src/specklepy/objects/base.py index b3189fa6..d198c2a1 100644 --- a/src/specklepy/objects/base.py +++ b/src/specklepy/objects/base.py @@ -5,6 +5,7 @@ Any, ClassVar, Dict, + ForwardRef, List, Optional, Set, @@ -17,7 +18,7 @@ from stringcase import pascalcase -from specklepy.logging.exceptions import SpeckleException +from specklepy.logging.exceptions import SpeckleException, SpeckleInvalidUnitException from specklepy.objects.units import Units, get_units_from_string from specklepy.transports.memory import MemoryTransport @@ -217,6 +218,9 @@ def _validate_type(t: Optional[type], value: Any) -> Tuple[bool, Any]: return True, t(value) if getattr(t, "__module__", None) == "typing": + if isinstance(t, ForwardRef): + return True, value + origin = getattr(t, "__origin__") # below is what in nicer for >= py38 # origin = get_origin(t) @@ -303,7 +307,7 @@ def _validate_type(t: Optional[type], value: Any) -> Tuple[bool, Any]: if 
isinstance(value, t): return True, value - with contextlib.suppress(ValueError): + with contextlib.suppress(ValueError, TypeError): if t is float and value is not None: return True, float(value) # TODO: dafuq, i had to add this not list check @@ -318,7 +322,7 @@ class Base(_RegisteringBase): id: Union[str, None] = None totalChildrenCount: Union[int, None] = None applicationId: Union[str, None] = None - _units: Union[Units, None] = None + _units: Union[None, str] = None def __init__(self, **kwargs) -> None: super().__init__() @@ -459,22 +463,19 @@ def add_detachable_attrs(self, names: Set[str]) -> None: @property def units(self) -> Union[str, None]: - if self._units: - return self._units.value - return None + return self._units @units.setter def units(self, value: Union[str, Units, None]): - if value is None: - units = value + """While this property accepts any string value, geometry expects units to be specific strings (see Units enum)""" + if isinstance(value, str) or value is None: + self._units = value elif isinstance(value, Units): - units: Units = value + self._units = value.value else: - units = get_units_from_string(value) - self._units = units - # except SpeckleInvalidUnitException as ex: - # warn(f"Units are reset to None. Reason {ex.message}") - # self._units = None + raise SpeckleInvalidUnitException( + f"Unknown type {type(value)} received for units" + ) def get_member_names(self) -> List[str]: """Get all of the property names on this object, dynamic or not""" diff --git a/src/specklepy/objects/geometry.py b/src/specklepy/objects/geometry.py index 469cc65e..922f3e14 100644 --- a/src/specklepy/objects/geometry.py +++ b/src/specklepy/objects/geometry.py @@ -39,7 +39,11 @@ def from_coords(cls, x: float = 0.0, y: float = 0.0, z: float = 0.0): return pt -class Pointcloud(Base, speckle_type=GEOMETRY + "Pointcloud"): +class Pointcloud( + Base, + speckle_type=GEOMETRY + "Pointcloud", + chunkable={"points": 31250, "colors": 62500, "sizes": 62500}, +): points: Optional[List[float]] = None colors: Optional[List[int]] = None sizes: Optional[List[float]] = None @@ -894,7 +898,7 @@ def Curve2DValues(self, value: List[float]): def VerticesValue(self) -> List[Point]: if self.Vertices is None: return None - encoded_unit = get_encoding_from_units(self.Vertices[0]._units) + encoded_unit = get_encoding_from_units(self.Vertices[0].units) values = [encoded_unit] for vertex in self.Vertices: values.extend(vertex.to_list()) @@ -909,7 +913,7 @@ def VerticesValue(self, value: List[float]): for i in range(0, len(value), 3): vertex = Point.from_list(value[i : i + 3]) - vertex._units = units + vertex.units = units vertices.append(vertex) self.Vertices = vertices diff --git a/src/specklepy/objects/graph_traversal/commit_object_builder.py b/src/specklepy/objects/graph_traversal/commit_object_builder.py new file mode 100644 index 00000000..2b8d1b3f --- /dev/null +++ b/src/specklepy/objects/graph_traversal/commit_object_builder.py @@ -0,0 +1,83 @@ +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Any, Collection, Dict, Generic, Iterable, List, Optional, Tuple, TypeVar +from attrs import define +from specklepy.objects.base import Base + +ROOT: str = "__Root" + +T = TypeVar('T') +PARENT_INFO = Tuple[Optional[str], str] + +@define(slots=True) +class CommitObjectBuilder(ABC, Generic[T]): + + converted: Dict[str, Base] + _parent_infos: Dict[str, Collection[PARENT_INFO]] + + def __init__(self) -> None: + self.converted = {} + self._parent_infos = {} + + @abstractmethod + def 
include_object(self, conversion_result: Base, native_object: T) -> None: + pass + + def build_commit_object(self, root_commit_object: Base) -> None: + self.apply_relationships(self.converted.values(), root_commit_object) + + def set_relationship(self, app_id: Optional[str], *parent_info : PARENT_INFO) -> None: + + if not app_id: + return + + self._parent_infos[app_id] = parent_info + + def apply_relationships(self, to_add: Iterable[Base], root_commit_object: Base) -> None: + for c in to_add: + try: + self.apply_relationship(c, root_commit_object) + except Exception as ex: + print(f"Failed to add object {type(c)} to commit object: {ex}") + + def apply_relationship(self, current: Base, root_commit_object: Base): + if not current.applicationId: raise Exception(f"Expected applicationId to have been set") + + parents = self._parent_infos[current.applicationId] + + for (parent_id, prop_name) in parents: + if not parent_id: continue + + parent: Optional[Base] + if parent_id == ROOT: + parent = root_commit_object + else: + parent = self.converted[parent_id] if parent_id in self.converted else None + + if not parent: continue + + try: + elements = get_detached_prop(parent, prop_name) + if not isinstance(elements, list): + elements = [] + set_detached_prop(parent, prop_name, elements) + + elements.append(current) + return + except Exception as ex: + # A parent was found, but it was invalid (Likely because of a type mismatch on a `elements` property) + print(f"Failed to add object {type(current)} to a converted parent; {ex}") + + raise Exception(f"Could not find a valid parent for object of type {type(current)}. Checked {len(parents)} potential parent, and non were converted!") + + +def get_detached_prop(speckle_object: Base, prop_name: str) -> Optional[Any]: + detached_prop_name = get_detached_prop_name(speckle_object, prop_name) + return getattr(speckle_object, detached_prop_name, None) + +def set_detached_prop(speckle_object: Base, prop_name: str, value: Optional[Any]) -> None: + detached_prop_name = get_detached_prop_name(speckle_object, prop_name) + setattr(speckle_object, detached_prop_name, value) + +def get_detached_prop_name(speckle_object: Base, prop_name: str) -> str: + return prop_name if hasattr(speckle_object, prop_name) else f"@{prop_name}" \ No newline at end of file diff --git a/src/specklepy/objects/graph_traversal/traversal.py b/src/specklepy/objects/graph_traversal/traversal.py new file mode 100644 index 00000000..0e2b3059 --- /dev/null +++ b/src/specklepy/objects/graph_traversal/traversal.py @@ -0,0 +1,123 @@ +from typing import Any, Callable, Collection, Iterable, Iterator, List, Optional, Set + +from attrs import define +from typing_extensions import Protocol, final + +from specklepy.objects import Base + + +class ITraversalRule(Protocol): + def get_members_to_traverse(self, o: Base) -> Set[str]: + """Get the members to traverse.""" + pass + + def does_rule_hold(self, o: Base) -> bool: + """Make sure the rule still holds.""" + pass + + +@final +@define(slots=True, frozen=True) +class DefaultRule: + def get_members_to_traverse(self, _) -> Set[str]: + return set() + + def does_rule_hold(self, _) -> bool: + return True + + +# we're creating a local protected "singleton" +_default_rule = DefaultRule() + + +@final +@define(slots=True, frozen=True) +class TraversalContext: + current: Base + member_name: Optional[str] = None + parent: Optional["TraversalContext"] = None + + +@final +@define(slots=True, frozen=True) +class GraphTraversal: + + _rules: List[ITraversalRule] + + def 
traverse(self, root: Base) -> Iterator[TraversalContext]: + stack: List[TraversalContext] = [] + + stack.append(TraversalContext(root)) + + while len(stack) > 0: + head = stack.pop() + yield head + + current = head.current + active_rule = self._get_active_rule_or_default_rule(current) + members_to_traverse = active_rule.get_members_to_traverse(current) + for child_prop in members_to_traverse: + try: + if child_prop in {"speckle_type", "units", "applicationId"}: continue #debug: to avoid noisy exceptions, explicitly avoid checking ones we know will fail, this is not exhaustive + if getattr(current, child_prop, None): + value = current[child_prop] + self._traverse_member_to_stack( + stack, value, child_prop, head + ) + except KeyError as ex: + # Unset application ids, and class variables like SpeckleType will throw when __getitem__ is called + pass + @staticmethod + def _traverse_member_to_stack( + stack: List[TraversalContext], + value: Any, + member_name: Optional[str] = None, + parent: Optional[TraversalContext] = None, + ): + if isinstance(value, Base): + stack.append(TraversalContext(value, member_name, parent)) + elif isinstance(value, list): + for obj in value: + GraphTraversal._traverse_member_to_stack(stack, obj, member_name, parent) + elif isinstance(value, dict): + for obj in value.values(): + GraphTraversal._traverse_member_to_stack(stack, obj, member_name, parent) + + @staticmethod + def traverse_member(value: Optional[Any]) -> Iterator[Base]: + if isinstance(value, Base): + yield value + elif isinstance(value, list): + for obj in value: + for o in GraphTraversal.traverse_member(obj): + yield o + elif isinstance(value, dict): + for obj in value.values(): + for o in GraphTraversal.traverse_member(obj): + yield o + + + def _get_active_rule_or_default_rule(self, o: Base) -> ITraversalRule: + return self._get_active_rule(o) or _default_rule + + def _get_active_rule(self, o: Base) -> Optional[ITraversalRule]: + for rule in self._rules: + if rule.does_rule_hold(o): + return rule + return None + + +@final +@define(slots=True, frozen=True) +class TraversalRule: + _conditions: Collection[Callable[[Base], bool]] + _members_to_traverse: Callable[[Base], Iterable[str]] + + def get_members_to_traverse(self, o: Base) -> Set[str]: + return set(self._members_to_traverse(o)) + + def does_rule_hold(self, o: Base) -> bool: + for condition in self._conditions: + if condition(o): + return True + return False \ No newline at end of file diff --git a/src/specklepy/objects/other.py b/src/specklepy/objects/other.py index 9ba3921a..3b9c1205 100644 --- a/src/specklepy/objects/other.py +++ b/src/specklepy/objects/other.py @@ -1,10 +1,12 @@ from typing import Any, List, Optional +from deprecated import deprecated from specklepy.objects.geometry import Point, Vector from .base import Base OTHER = "Objects.Other." +OTHER_REVIT = OTHER + "Revit." 
IDENTITY_TRANSFORM = [ 1.0, @@ -72,7 +74,7 @@ class DisplayStyle(Base, speckle_type=OTHER + "DisplayStyle"): class Transform( Base, speckle_type=OTHER + "Transform", - serialize_ignore={"translation", "scaling", "is_identity"}, + serialize_ignore={"translation", "scaling", "is_identity", "value"}, ): """The 4x4 transformation matrix @@ -84,12 +86,21 @@ class Transform( _value: Optional[List[float]] = None @property + @deprecated(version="2.12", reason="Use matrix") def value(self) -> List[float]: - """The transform matrix represented as a flat list of 16 floats""" return self._value @value.setter def value(self, value: List[float]) -> None: + self.matrix = value + + @property + def matrix(self) -> List[float]: + """The transform matrix represented as a flat list of 16 floats""" + return self._value + + @matrix.setter + def matrix(self, value: List[float]) -> None: try: value = [float(x) for x in value] except (ValueError, TypeError) as error: @@ -118,7 +129,7 @@ def scaling(self) -> List[float]: @property def is_identity(self) -> bool: - return self.value == IDENTITY_TRANSFORM + return self._value == IDENTITY_TRANSFORM def apply_to_point(self, point: Point) -> Point: """Transform a single speckle Point @@ -236,15 +247,38 @@ class BlockDefinition( geometry: Optional[List[Base]] = None -class BlockInstance( - Base, speckle_type=OTHER + "BlockInstance", detachable={"blockDefinition"} +class Instance( + Base, speckle_type=OTHER + "Instance", detachable={"definition"} ): - blockDefinition: Optional[BlockDefinition] = None transform: Optional[Transform] = None + definition: Optional[Base] = None +class BlockInstance( + Instance, speckle_type=OTHER + "BlockInstance", serialize_ignore={"blockDefinition"} +): + @property + @deprecated(version="2.13", reason="Use definition") + def blockDefinition(self) -> Optional[BlockDefinition]: + if isinstance(self.definition, BlockDefinition): + return self.definition + return None + + @blockDefinition.setter + def blockDefinition(self, value: Optional[BlockDefinition]) -> None: + self.definition = value + +class RevitInstance(Instance, speckle_type=OTHER_REVIT + "RevitInstance"): + level: Optional[Base] = None + facingFlipped: bool + handFlipped: bool + parameters: Optional[Base] = None + elementId: Optional[str] + # TODO: prob move this into a built elements module, but just trialling this for now -class RevitParameter(Base, speckle_type="Objects.BuiltElements.Revit.Parameter"): +class RevitParameter( + Base, speckle_type="Objects.BuiltElements.Revit.Parameter" +): name: Optional[str] = None value: Any = None applicationUnitType: Optional[str] = None # eg UnitType UT_Length @@ -255,3 +289,10 @@ class RevitParameter(Base, speckle_type="Objects.BuiltElements.Revit.Parameter") isShared: bool = False isReadOnly: bool = False isTypeParameter: bool = False + +class Collection( + Base, speckle_type="Speckle.Core.Models.Collection", detachable={"elements"} +): + name: Optional[str] = None + collectionType: Optional[str] = None + elements: Optional[List[Base]] = None \ No newline at end of file diff --git a/src/specklepy/objects/structural/properties.py b/src/specklepy/objects/structural/properties.py index 65810c88..e0153a1e 100644 --- a/src/specklepy/objects/structural/properties.py +++ b/src/specklepy/objects/structural/properties.py @@ -65,6 +65,7 @@ class ShapeType(int, Enum): Box = 7 Catalogue = 8 Explicit = 9 + Undefined = 10 class PropertyTypeSpring(int, Enum): @@ -90,7 +91,9 @@ class Property(Base, speckle_type=STRUCTURAL_PROPERTY): name: Optional[str] = 
None -class SectionProfile(Base, speckle_type=STRUCTURAL_PROPERTY + ".Profiles.SectionProfile"): +class SectionProfile( + Base, speckle_type=STRUCTURAL_PROPERTY + ".Profiles.SectionProfile" +): name: Optional[str] = None shapeType: Optional[ShapeType] = None area: float = 0.0 diff --git a/src/specklepy/objects/units.py b/src/specklepy/objects/units.py index e89dbc71..d795738c 100644 --- a/src/specklepy/objects/units.py +++ b/src/specklepy/objects/units.py @@ -35,6 +35,7 @@ class Units(Enum): Units.none: ["none", "null"], } + UNITS_ENCODINGS = { Units.none: 0, None: 0, @@ -49,6 +50,20 @@ class Units(Enum): } +UNIT_SCALE = { + Units.none: 1, + Units.mm: 0.001, + Units.cm: 0.01, + Units.m: 1.0, + Units.km: 1000.0, + Units.inches: 0.0254, + Units.feet: 0.3048, + Units.yards: 0.9144, + Units.miles: 1609.340, +} +"""Unit scaling factor to meters""" + + def get_units_from_string(unit: str) -> Units: if not isinstance(unit, str): raise SpeckleInvalidUnitException(unit) @@ -59,10 +74,10 @@ def get_units_from_string(unit: str) -> Units: raise SpeckleInvalidUnitException(unit) -def get_units_from_encoding(unit: int): +def get_units_from_encoding(unit: int) -> Units: for name, encoding in UNITS_ENCODINGS.items(): if unit == encoding: - return name + return name or Units.none raise SpeckleException( message=( @@ -72,13 +87,36 @@ def get_units_from_encoding(unit: int): ) -def get_encoding_from_units(unit: Union[Units, None]): +def get_encoding_from_units(unit: Union[Units, str, None]): + maybe_sanitized_unit = unit + if isinstance(unit, str): + for unit_enum, aliases in UNITS_STRINGS.items(): + if unit in aliases: + maybe_sanitized_unit = unit_enum try: - return UNITS_ENCODINGS[unit] + return UNITS_ENCODINGS[maybe_sanitized_unit] except KeyError as e: raise SpeckleException( message=( - f"No encoding exists for unit {unit}." + f"No encoding exists for unit {maybe_sanitized_unit}." f"Please enter a valid unit to encode (eg {UNITS_ENCODINGS})." 
) ) from e + + +def get_scale_factor_from_string(fromUnits: str, toUnits: str) -> float: + """Returns a scalar to convert distance values from one unit system to another""" + return get_scale_factor(get_units_from_string(fromUnits), get_units_from_string(toUnits)) + + +def get_scale_factor(fromUnits: Units, toUnits: Units) -> float: + """Returns a scalar to convert distance values from one unit system to another""" + return get_scale_factor_to_meters(fromUnits) / get_scale_factor_to_meters(toUnits) + + +def get_scale_factor_to_meters(fromUnits: Units) -> float: + """Returns a scalar to convert distance values from one unit system to meters""" + if fromUnits not in UNIT_SCALE: + raise ValueError(f"Invalid units provided: {fromUnits}") + + return UNIT_SCALE[fromUnits] \ No newline at end of file diff --git a/src/specklepy/transports/abstract_transport.py b/src/specklepy/transports/abstract_transport.py index 5e7a056a..e3eb973c 100644 --- a/src/specklepy/transports/abstract_transport.py +++ b/src/specklepy/transports/abstract_transport.py @@ -1,16 +1,12 @@ from abc import ABC, abstractmethod from typing import Dict, List, Optional -from pydantic import BaseModel -from pydantic.config import Extra - - -class AbstractTransport(ABC, BaseModel): - _name: str = "Abstract" +class AbstractTransport(ABC): @property + @abstractmethod def name(self): - return type(self)._name + pass @abstractmethod def begin_write(self) -> None: @@ -87,7 +83,3 @@ def copy_object_and_children( str -- the string representation of the root object """ pass - - class Config: - extra = Extra.allow - arbitrary_types_allowed = True diff --git a/src/specklepy/transports/memory.py b/src/specklepy/transports/memory.py index 530dd5be..43cdff60 100644 --- a/src/specklepy/transports/memory.py +++ b/src/specklepy/transports/memory.py @@ -4,14 +4,15 @@ class MemoryTransport(AbstractTransport): - _name: str = "Memory" - objects: dict = {} - saved_object_count: int = 0 - - def __init__(self, name=None, **data: Any) -> None: - super().__init__(**data) - if name: - self._name = name + def __init__(self, name="Memory") -> None: + super().__init__() + self._name = name + self.objects = {} + self.saved_object_count = 0 + + @property + def name(self) -> str: + return self._name def __repr__(self) -> str: return f"MemoryTransport(objects: {len(self.objects)})" diff --git a/src/specklepy/transports/server/batch_sender.py b/src/specklepy/transports/server/batch_sender.py index 45daf1a5..a9e9a377 100644 --- a/src/specklepy/transports/server/batch_sender.py +++ b/src/specklepy/transports/server/batch_sender.py @@ -18,6 +18,7 @@ def __init__( stream_id, token, max_batch_size_mb=1, + max_batch_length=20000, batch_buffer_length=10, thread_count=4, ): @@ -26,6 +27,7 @@ def __init__( self._token = token self.max_size = int(max_batch_size_mb * 1000 * 1000) + self.max_batch_length = int(max_batch_length) self._batches = queue.Queue(batch_buffer_length) self._crt_batch = [] self._crt_batch_size = 0 @@ -39,7 +41,11 @@ def send_object(self, id: str, obj: str): self._create_threads() crt_obj_size = len(obj) - if not self._crt_batch or self._crt_batch_size + crt_obj_size < self.max_size: + crt_batch_length = len(self._crt_batch) + if not self._crt_batch or ( + self._crt_batch_size + crt_obj_size < self.max_size + and crt_batch_length < self.max_batch_length + ): self._crt_batch.append((id, obj)) self._crt_batch_size += crt_obj_size return @@ -90,17 +96,18 @@ def _sending_thread_main(self): self._exception = self._exception or ex LOG.error("ServerTransport 
sending thread error: " + str(ex)) - def _bg_send_batch(self, session, batch): + def _bg_send_batch(self, session: requests.Session, batch): object_ids = [obj[0] for obj in batch] - try: - server_has_object = session.post( - url=f"{self.server_url}/api/diff/{self.stream_id}", - data={"objects": json.dumps(object_ids)}, - ).json() - except Exception as ex: + response = session.post( + url=f"{self.server_url}/api/diff/{self.stream_id}", + data={"objects": json.dumps(object_ids)}, + ) + if response.status_code == 403: raise SpeckleException( f"Invalid credentials - cannot send objects to server {self.server_url}" - ) from ex + ) + response.raise_for_status() + server_has_object = response.json() new_object_ids = [x for x in object_ids if not server_has_object[x]] new_object_ids = set(new_object_ids) @@ -130,7 +137,7 @@ def _bg_send_batch(self, session, batch): raise SpeckleException( message=( "Could not save the object to the server - status code" - f" {r.status_code}" + f" {r.status_code} ({r.text[:1000]})" ) ) except json.JSONDecodeError as error: diff --git a/src/specklepy/transports/server/server.py b/src/specklepy/transports/server/server.py index 7ea2e713..019564fe 100644 --- a/src/specklepy/transports/server/server.py +++ b/src/specklepy/transports/server/server.py @@ -4,8 +4,8 @@ import requests -from specklepy.api.client import SpeckleClient -from specklepy.api.credentials import Account, get_account_from_token +from specklepy.core.api.client import SpeckleClient +from specklepy.core.api.credentials import Account, get_account_from_token from specklepy.logging.exceptions import SpeckleException, SpeckleWarning from specklepy.transports.abstract_transport import AbstractTransport @@ -45,13 +45,6 @@ class ServerTransport(AbstractTransport): ``` """ - _name = "RemoteTransport" - url: Optional[str] = None - stream_id: Optional[str] = None - account: Optional[Account] = None - saved_obj_count: int = 0 - session: Optional[requests.Session] = None - def __init__( self, stream_id: str, @@ -59,15 +52,18 @@ def __init__( account: Optional[Account] = None, token: Optional[str] = None, url: Optional[str] = None, - **data: Any, + name: str = "RemoteTransport", ) -> None: - super().__init__(**data) + super().__init__() if client is None and account is None and token is None and url is None: raise SpeckleException( "You must provide either a client or a token and url to construct a" " ServerTransport." 
) + self._name = name + self.account = None + self.saved_obj_count = 0 if account: self.account = account url = account.serverInfo.url @@ -97,6 +93,10 @@ def __init__( {"Authorization": f"Bearer {self.account.token}", "Accept": "text/plain"} ) + @property + def name(self) -> str: + return self._name + def begin_write(self) -> None: self.saved_obj_count = 0 diff --git a/src/specklepy/transports/sqlite.py b/src/specklepy/transports/sqlite.py index 0dc64a55..bcdd4758 100644 --- a/src/specklepy/transports/sqlite.py +++ b/src/specklepy/transports/sqlite.py @@ -9,31 +9,22 @@ class SQLiteTransport(AbstractTransport): - _name = "SQLite" - _base_path: Optional[str] = None - _root_path: Optional[str] = None - __connection: Optional[sqlite3.Connection] = None - app_name: str = "" - scope: str = "" - saved_obj_count: int = 0 - max_size: Optional[int] = None - _current_batch: Optional[List[Tuple[str, str]]] = None - _current_batch_size: Optional[int] = None - def __init__( self, base_path: Optional[str] = None, app_name: Optional[str] = None, scope: Optional[str] = None, max_batch_size_mb: float = 10.0, - **data: Any, + name: str = "SQLite", ) -> None: - super().__init__(**data) + super().__init__() + self._name = name self.app_name = app_name or "Speckle" self.scope = scope or "Objects" self._base_path = base_path or self.get_base_path(self.app_name) self.max_size = int(max_batch_size_mb * 1000 * 1000) - self._current_batch = [] + self.saved_obj_count = 0 + self._current_batch: List[Tuple[str, str]] = [] self._current_batch_size = 0 try: @@ -54,24 +45,12 @@ def __init__( def __repr__(self) -> str: return f"SQLiteTransport(app: '{self.app_name}', scope: '{self.scope}')" + @property + def name(self) -> str: + return self._name + @staticmethod def get_base_path(app_name): - # # from appdirs https://github.com/ActiveState/appdirs/blob/master/appdirs.py - # # default mac path is not the one we use (we use unix path), so using special case for this - # system = sys.platform - # if system.startswith("java"): - # import platform - - # os_name = platform.java_ver()[3][0] - # if os_name.startswith("Mac"): - # system = "darwin" - - # if system != "darwin": - # return user_data_dir(appname=app_name, appauthor=False, roaming=True) - - # path = os.path.expanduser("~/.config/") - # return os.path.join(path, app_name) - return str( speckle_path_provider.user_application_data_path().joinpath(app_name) ) diff --git a/tests/intergration/conftest.py b/tests/intergration/conftest.py index 225ae9bf..cd576b47 100644 --- a/tests/intergration/conftest.py +++ b/tests/intergration/conftest.py @@ -1,5 +1,6 @@ import random import uuid +from urllib.parse import parse_qs, urlparse import pytest import requests @@ -38,7 +39,8 @@ def seed_user(host): ) if not r.ok: raise Exception(f"Cannot seed user: {r.reason}") - access_code = r.text.split("access_code=")[1] + redirect_url = urlparse(r.headers.get("location")) + access_code = parse_qs(redirect_url.query)["access_code"][0] # type: ignore r_tokens = requests.post( url=f"http://{host}/auth/token", diff --git a/tests/intergration/test_active_user.py b/tests/intergration/test_active_user.py index 8f3ff3fa..755a8cb3 100644 --- a/tests/intergration/test_active_user.py +++ b/tests/intergration/test_active_user.py @@ -39,7 +39,7 @@ def test_user_activity(self, client: SpeckleClient, second_user_dict): assert my_activity.totalCount assert isinstance(their_activity, ActivityCollection) - older_activity = client.user.activity(before=my_activity.items[0].time) + older_activity = 
diff --git a/tests/intergration/test_serialization.py b/tests/intergration/test_serialization.py
index 6f444795..85bcd927 100644
--- a/tests/intergration/test_serialization.py
+++ b/tests/intergration/test_serialization.py
@@ -17,7 +17,7 @@ def test_serialize(self, base):
         deserialized = operations.deserialize(serialized)
 
         assert base.get_id() == deserialized.get_id()
-        assert base.units == "mm"
+        assert base.units == "millimetres"
         assert isinstance(base.test_bases[0], Base)
         assert base["@revit_thing"].speckle_type == "SpecialRevitFamily"
         assert base["@detach"].name == deserialized["@detach"].name
diff --git a/tests/intergration/test_stream.py b/tests/intergration/test_stream.py
index b9e06153..4c8e4c26 100644
--- a/tests/intergration/test_stream.py
+++ b/tests/intergration/test_stream.py
@@ -37,7 +37,7 @@ def updated_stream(
 
     @pytest.fixture(scope="module")
     def second_user(self, second_client: SpeckleClient):
-        return second_client.user.get()
+        return second_client.active_user.get()
 
     def test_stream_create(self, client, stream, updated_stream):
         stream.id = updated_stream.id = client.stream.create(
@@ -93,15 +93,6 @@ def test_stream_favorite(self, client, stream):
         assert isinstance(favorited, Stream)
         assert unfavorited.favoritedDate is None
 
-    def test_stream_grant_permission(self, client, stream, second_user):
-        # deprecated as of Speckle Server 2.6.4
-        with pytest.raises(UnsupportedException):
-            client.stream.grant_permission(
-                stream_id=stream.id,
-                user_id=second_user.id,
-                role="stream:contributor",
-            )
-
     def test_stream_invite(
         self, client: SpeckleClient, stream: Stream, second_user_dict: dict
     ):
@@ -122,18 +113,18 @@ def test_stream_invite_get_all_for_user(
         self, second_client: SpeckleClient, stream: Stream
     ):
         # NOTE: these are user queries, but testing here to contain the flow
-        invites = second_client.user.get_all_pending_invites()
+        invites = second_client.active_user.get_all_pending_invites()
         assert isinstance(invites, list)
         assert isinstance(invites[0], PendingStreamCollaborator)
         assert len(invites) == 1
 
-        invite = second_client.user.get_pending_invite(stream_id=stream.id)
+        invite = second_client.active_user.get_pending_invite(stream_id=stream.id)
         assert isinstance(invite, PendingStreamCollaborator)
 
     def test_stream_invite_use(self, second_client: SpeckleClient, stream: Stream):
         invite: PendingStreamCollaborator = (
-            second_client.user.get_all_pending_invites()[0]
+            second_client.active_user.get_all_pending_invites()[0]
         )
 
         accepted = second_client.stream.invite_use(
diff --git a/tests/unit/test_base.py b/tests/unit/test_base.py
index f653a3a4..1dc87f35 100644
--- a/tests/unit/test_base.py
+++ b/tests/unit/test_base.py
@@ -85,14 +85,15 @@ def test_speckle_type_cannot_be_set(base: Base) -> None:
 
 def test_setting_units():
     b = Base(units="foot")
-    assert b.units == "ft"
+    assert b.units == "foot"
 
-    with pytest.raises(SpeckleInvalidUnitException):
-        b.units = "big"
+    # with pytest.raises(SpeckleInvalidUnitException):
+    b.units = "big"
+    assert b.units == "big"
 
     with pytest.raises(SpeckleInvalidUnitException):
         b.units = 7  # invalid args are skipped
-    assert b.units == "ft"
+    assert b.units == "big"
 
     b.units = None  # None should be a valid arg
     assert b.units is None
diff --git a/tests/unit/test_geometry.py b/tests/unit/test_geometry.py
index 6b3637b3..388ddcf7 100644
--- a/tests/unit/test_geometry.py
+++ b/tests/unit/test_geometry.py
@@ -388,9 +388,9 @@ def test_brep_curve3d_values_serialization(curve, polyline, circle):
 def test_brep_vertices_values_serialization():
     brep = Brep()
     brep.VerticesValue = [1, 1, 1, 1, 2, 2, 2, 3, 3, 3]
-    assert brep.Vertices[0].get_id() == Point(x=1, y=1, z=1, _units=Units.mm).get_id()
-    assert brep.Vertices[1].get_id() == Point(x=2, y=2, z=2, _units=Units.mm).get_id()
-    assert brep.Vertices[2].get_id() == Point(x=3, y=3, z=3, _units=Units.mm).get_id()
+    assert brep.Vertices[0].get_id() == Point(x=1, y=1, z=1, units=Units.mm).get_id()
+    assert brep.Vertices[1].get_id() == Point(x=2, y=2, z=2, units=Units.mm).get_id()
+    assert brep.Vertices[2].get_id() == Point(x=3, y=3, z=3, units=Units.mm).get_id()
 
 
 def test_trims_value_serialization():
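A small sketch of the keyword change exercised above: `Point` now takes `units` directly instead of the private `_units`; the coordinate values are illustrative.

from specklepy.objects.geometry import Point
from specklepy.objects.units import Units

p = Point(x=1.0, y=2.0, z=3.0, units=Units.mm)
print(p.units)     # the unit string carried by the point
print(p.get_id())  # hash-based id, stable for identical values and units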
diff --git a/tests/unit/test_graph_traversal.py b/tests/unit/test_graph_traversal.py
new file mode 100644
index 00000000..df8cf1bf
--- /dev/null
+++ b/tests/unit/test_graph_traversal.py
@@ -0,0 +1,105 @@
+from dataclasses import dataclass
+from typing import Dict, List, Optional
+from unittest import TestCase
+
+from specklepy.objects import Base
+from specklepy.objects.graph_traversal.traversal import GraphTraversal, TraversalRule
+
+
+@dataclass()
+class TraversalMock(Base):
+    child: Optional[Base]
+    list_children: List[Base]
+    dict_children: Dict[str, Base]
+
+
+class GraphTraversalTests(TestCase):
+    def test_traverse_list_members(self):
+        traverse_lists_rule = TraversalRule(
+            [lambda _: True],
+            lambda x: [
+                item
+                for item in x.get_member_names()
+                if isinstance(getattr(x, item, None), list)
+            ],
+        )
+
+        expected_traverse = Base()
+        expected_traverse.id = "List Member"
+
+        expected_ignore = Base()
+        expected_ignore.id = "Not List Member"
+
+        test_case = TraversalMock(
+            list_children=[expected_traverse],
+            dict_children={"myprop": expected_ignore},
+            child=expected_ignore,
+        )
+
+        ret = [
+            context.current
+            for context in GraphTraversal([traverse_lists_rule]).traverse(test_case)
+        ]
+
+        self.assertCountEqual(ret, [test_case, expected_traverse])
+        self.assertNotIn(expected_ignore, ret)
+        self.assertEqual(len(ret), 2)
+
+    def test_traverse_dict_members(self):
+        traverse_lists_rule = TraversalRule(
+            [lambda _: True],
+            lambda x: [
+                item
+                for item in x.get_member_names()
+                if isinstance(getattr(x, item, None), dict)
+            ],
+        )
+
+        expected_traverse = Base()
+        expected_traverse.id = "Dict Member"
+
+        expected_ignore = Base()
+        expected_ignore.id = "Not Dict Member"
+
+        test_case = TraversalMock(
+            list_children=[expected_ignore],
+            dict_children={"myprop": expected_traverse},
+            child=expected_ignore,
+        )
+
+        ret = [
+            context.current
+            for context in GraphTraversal([traverse_lists_rule]).traverse(test_case)
+        ]
+
+        self.assertCountEqual(ret, [test_case, expected_traverse])
+        self.assertNotIn(expected_ignore, ret)
+        self.assertEqual(len(ret), 2)
+
+    def test_traverse_dynamic(self):
+        traverse_lists_rule = TraversalRule(
+            [lambda _: True], lambda x: x.get_dynamic_member_names()
+        )
+
+        expected_traverse = Base()
+        expected_traverse.id = "List Member"
+
+        expected_ignore = Base()
+        expected_ignore.id = "Not List Member"
+
+        test_case = TraversalMock(
+            child=expected_ignore,
+            list_children=[expected_ignore],
+            dict_children={"myprop": expected_ignore},
+        )
+        test_case["dynamicChild"] = expected_traverse
+        test_case["dynamicListChild"] = [expected_traverse]
+
+        ret = [
+            context.current
+            for context in GraphTraversal([traverse_lists_rule]).traverse(test_case)
+        ]
+
+        self.assertCountEqual(ret, [test_case, expected_traverse, expected_traverse])
+        self.assertNotIn(expected_ignore, ret)
+        self.assertEqual(len(ret), 3)
\ No newline at end of file
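Beyond the unit tests, the same traversal API can walk any object tree; this is a sketch that mirrors the rule construction above and assumes only the `Base` members shown there.

from specklepy.objects import Base
from specklepy.objects.graph_traversal.traversal import GraphTraversal, TraversalRule

root = Base()
root["@children"] = [Base(), Base()]  # dynamic member holding nested objects

# visit every object reachable through dynamic members
rule = TraversalRule([lambda _: True], lambda o: o.get_dynamic_member_names())
for context in GraphTraversal([rule]).traverse(root):
    print(context.current.speckle_type)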
diff --git a/tests/unit/test_path.py b/tests/unit/test_path.py
index c5e8490f..c7ce287a 100644
--- a/tests/unit/test_path.py
+++ b/tests/unit/test_path.py
@@ -44,3 +44,12 @@ def test_accounts_folder_name_override():
     speckle_path_provider.override_accounts_folder_name(new_folder_name)
     assert speckle_path_provider._accounts_folder_name == new_folder_name
     speckle_path_provider.override_accounts_folder_name(old_folder_name)
+
+
+def test_connector_installation_path():
+    host_application = "test application"
+    connector_path = speckle_path_provider.user_speckle_connector_installation_path(
+        host_application
+    )
+    assert "connector_installations" in str(connector_path)
+    assert str(connector_path).endswith(host_application)
diff --git a/tests/unit/test_type_validation.py b/tests/unit/test_type_validation.py
index 6411234f..5ca9aece 100644
--- a/tests/unit/test_type_validation.py
+++ b/tests/unit/test_type_validation.py
@@ -106,6 +106,9 @@ def __init__(self, foo: str) -> None:
             True,
             fake_bases,
         ),
+        (List["int"], [2, 3, 4], True, [2, 3, 4]),
+        (Union[float, Dict[str, float]], {"foo": 1, "bar": 2}, True, {"foo": 1.0, "bar": 2.0}),
+        (Union[float, Dict[str, float]], {"foo": "bar"}, False, {"foo": "bar"}),
     ],
 )
 def test_validate_type(
diff --git a/tests/unit/test_unit_scaling.py b/tests/unit/test_unit_scaling.py
new file mode 100644
index 00000000..34627462
--- /dev/null
+++ b/tests/unit/test_unit_scaling.py
@@ -0,0 +1,56 @@
+import pytest
+
+from specklepy.objects.units import Units, get_scale_factor
+
+
+@pytest.mark.parametrize(
+    "fromUnits, toUnits, inValue, expectedOutValue",
+    [
+        # To self
+        (Units.km, Units.km, 1.5, 1.5),
+        (Units.km, Units.km, 0, 0),
+        (Units.m, Units.m, 1.5, 1.5),
+        (Units.m, Units.m, 0, 0),
+        (Units.cm, Units.cm, 1.5, 1.5),
+        (Units.cm, Units.cm, 0, 0),
+        (Units.mm, Units.mm, 1.5, 1.5),
+        (Units.mm, Units.mm, 0, 0),
+        (Units.miles, Units.miles, 1.5, 1.5),
+        (Units.miles, Units.miles, 0, 0),
+        (Units.yards, Units.yards, 1.5, 1.5),
+        (Units.yards, Units.yards, 0, 0),
+        (Units.feet, Units.feet, 1.5, 1.5),
+        (Units.feet, Units.feet, 0, 0),
+
+        # To Meters
+        (Units.km, Units.m, 987654.321, 987654321),
+        (Units.m, Units.m, 987654.321, 987654.321),
+        (Units.mm, Units.m, 98765432.1, 98765.4321),
+        (Units.cm, Units.m, 9876543.21, 98765.4321),
+
+        # To negative meters
+        (Units.km, Units.m, -987654.321, -987654321),
+        (Units.m, Units.m, -987654.321, -987654.321),
+        (Units.mm, Units.m, -98765432.1, -98765.4321),
+        (Units.cm, Units.m, -9876543.21, -98765.4321),
+
+        (Units.m, Units.km, 987654.321, 987.654321),
+        (Units.m, Units.cm, 987654.321, 98765432.1),
+        (Units.m, Units.mm, 987654.321, 987654321),
+
+        # Imperial
+        (Units.miles, Units.m, 123.45, 198673.517),
+        (Units.miles, Units.inches, 123.45, 7821792),
+        (Units.yards, Units.m, 123.45, 112.88268),
+        (Units.yards, Units.inches, 123.45, 4444.2),
+        (Units.feet, Units.m, 123.45, 37.62756),
+        (Units.feet, Units.inches, 123.45, 1481.4),
+        (Units.inches, Units.m, 123.45, 3.13563),
+    ],
+)
+def test_get_scale_factor_between_units(
+    fromUnits: Units, toUnits: Units, inValue: float, expectedOutValue: float
+):
+    # use a relative comparison so rounded expected values still pass
+    actual = inValue * get_scale_factor(fromUnits, toUnits)
+    assert actual == pytest.approx(expectedOutValue)
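A quick illustration of the helper exercised by the new tests: convert a value by multiplying with the factor returned for a pair of units.

from specklepy.objects.units import Units, get_scale_factor

length_mm = 2500.0
length_m = length_mm * get_scale_factor(Units.mm, Units.m)
print(length_m)  # 2.5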
diff --git a/utils/installer.py b/utils/installer.py
new file mode 100644
index 00000000..9f22cc8f
--- /dev/null
+++ b/utils/installer.py
@@ -0,0 +1,207 @@
+"""
+Provides uniform and consistent path helpers and dependency installation
+utilities for `specklepy` connectors.
+"""
+import os
+import sys
+from pathlib import Path
+from typing import Optional
+from importlib import import_module, invalidate_caches
+
+_user_data_env_var = "SPECKLE_USERDATA_PATH"
+
+
+def _path() -> Optional[Path]:
+    """Read the user data path override setting."""
+    path_override = os.environ.get(_user_data_env_var)
+    if path_override:
+        return Path(path_override)
+    return None
+
+
+_application_name = "Speckle"
+
+
+def override_application_name(application_name: str) -> None:
+    """Override the global Speckle application name."""
+    global _application_name
+    _application_name = application_name
+
+
+def override_application_data_path(path: Optional[str]) -> None:
+    """
+    Override the global Speckle application data path.
+
+    If the value of path is `None` the environment variable gets deleted.
+    """
+    if path:
+        os.environ[_user_data_env_var] = path
+    else:
+        os.environ.pop(_user_data_env_var, None)
+
+
+def _ensure_folder_exists(base_path: Path, folder_name: str) -> Path:
+    path = base_path.joinpath(folder_name)
+    path.mkdir(exist_ok=True, parents=True)
+    return path
+
+
+def user_application_data_path() -> Path:
+    """Get the platform specific user configuration folder path"""
+    path_override = _path()
+    if path_override:
+        return path_override
+
+    try:
+        if sys.platform.startswith("win"):
+            app_data_path = os.getenv("APPDATA")
+            if not app_data_path:
+                raise Exception(
+                    "Cannot get appdata path from environment."
+                )
+            return Path(app_data_path)
+        else:
+            # try getting the standard XDG_DATA_HOME value
+            # as that is used as an override
+            app_data_path = os.getenv("XDG_DATA_HOME")
+            if app_data_path:
+                return Path(app_data_path)
+            else:
+                return _ensure_folder_exists(Path.home(), ".config")
+    except Exception as ex:
+        raise Exception(
+            "Failed to initialize user application data path.", ex
+        )
+
+
+def user_speckle_folder_path() -> Path:
+    """Get the folder where the user's Speckle data should be stored."""
+    return _ensure_folder_exists(user_application_data_path(), _application_name)
+
+
+def user_speckle_connector_installation_path(host_application: str) -> Path:
+    """
+    Gets a connector specific installation folder.
+
+    In this folder we can put our connector installation and all python packages.
+    """
+    return _ensure_folder_exists(
+        _ensure_folder_exists(user_speckle_folder_path(), "connector_installations"),
+        host_application,
+    )
+
+
+
+
+
+
+print("Starting module dependency installation")
+print(sys.executable)
+
+PYTHON_PATH = sys.executable
+
+
+def connector_installation_path(host_application: str) -> Path:
+    connector_installation_path = user_speckle_connector_installation_path(host_application)
+    connector_installation_path.mkdir(exist_ok=True, parents=True)
+
+    # set user modules path at beginning of paths for earlier hit
+    if sys.path[0] != str(connector_installation_path):
+        sys.path.insert(0, str(connector_installation_path))
+
+    print(f"Using connector installation path {connector_installation_path}")
+    return connector_installation_path
+
+
+def is_pip_available() -> bool:
+    try:
+        import_module("pip")  # noqa F401
+        return True
+    except ImportError:
+        return False
+
+
") + + from subprocess import run + + completed_process = run([PYTHON_PATH, "-m", "ensurepip"]) + + if completed_process.returncode == 0: + print("Successfully installed pip") + else: + raise Exception(f"Failed to install pip, got {completed_process.returncode} return code") + + +def get_requirements_path() -> Path: + # we assume that a requirements.txt exists next to the __init__.py file + path = Path(Path(__file__).parent, "requirements.txt") + assert path.exists() + return path + + +def install_requirements(host_application: str) -> None: + # set up addons/modules under the user + # script path. Here we'll install the + # dependencies + path = connector_installation_path(host_application) + print(f"Installing Speckle dependencies to {path}") + + from subprocess import run + + completed_process = run( + [ + PYTHON_PATH, + "-m", + "pip", + "install", + "-t", + str(path), + "-r", + str(get_requirements_path()), + ], + capture_output=True, + text=True, + ) + + if completed_process.returncode != 0: + m = f"Failed to install dependenices through pip, got {completed_process.returncode} return code" + print(m) + raise Exception(m) + + +def install_dependencies(host_application: str) -> None: + if not is_pip_available(): + ensure_pip() + + install_requirements(host_application) + + +def _import_dependencies() -> None: + import_module("specklepy") + # the code above doesn't work for now, it fails on importing graphql-core + # despite that, the connector seams to be working as expected + # But it would be nice to make this solution work + # it would ensure that all dependencies are fully loaded + # requirements = get_requirements_path().read_text() + # reqs = [ + # req.split(" ; ")[0].split("==")[0].split("[")[0].replace("-", "_") + # for req in requirements.split("\n") + # if req and not req.startswith(" ") + # ] + # for req in reqs: + # print(req) + # import_module("specklepy") + +def ensure_dependencies(host_application: str) -> None: + try: + install_dependencies(host_application) + invalidate_caches() + _import_dependencies() + print("Successfully found dependencies") + except ImportError: + raise Exception(f"Cannot automatically ensure Speckle dependencies. Please try restarting the host application {host_application}!") + +